diff --git a/collector/compile-benchmarks/README.md b/collector/compile-benchmarks/README.md
index bfd2b7c86..b59648a7a 100644
--- a/collector/compile-benchmarks/README.md
+++ b/collector/compile-benchmarks/README.md
@@ -140,6 +140,9 @@ Rust code being written today.
 - **encoding**: An old crate providing character encoding support. Contains
   some large tables.
+- **cargo**: An old version of Cargo, corresponding to the 1.24.0 Rust release.
+  Two of its dependencies (`socket2` and `url`) had to be vendored and patched
+  so that the benchmark can compile with an old rustc.
 - **futures**: v0.1.0 of the popular `futures` crate, which was used by many
   Rust programs. Newer versions of this crate (e.g. v0.3.21 from February
   2021) contain very little code, instead relying on sub-crates. This makes them less
diff --git a/collector/compile-benchmarks/REUSE.toml b/collector/compile-benchmarks/REUSE.toml
index e16ba1dc9..e038422e1 100644
--- a/collector/compile-benchmarks/REUSE.toml
+++ b/collector/compile-benchmarks/REUSE.toml
@@ -27,6 +27,21 @@ path = "bitmaps-3.2.1-new-solver/**"
 SPDX-License-Identifier = "MPL-2.0"
 SPDX-FileCopyrightText = "Bodil Stokke"
 
+[[annotations]]
+path = "cargo/**"
+SPDX-FileCopyrightText = "The Rust Project Developers (see https://thanks.rust-lang.org)"
+SPDX-License-Identifier = "MIT OR Apache-2.0"
+
+[[annotations]]
+path = "cargo/socket2-0.2.3/**"
+SPDX-FileCopyrightText = "Alex Crichton"
+SPDX-License-Identifier = "MIT OR Apache-2.0"
+
+[[annotations]]
+path = "cargo/url-1.5.1/**"
+SPDX-FileCopyrightText = "The rust-url developers"
+SPDX-License-Identifier = "MIT OR Apache-2.0"
+
 [[annotations]]
 path = "cargo-0.87.1/**"
 SPDX-FileCopyrightText = "The Rust Project Developers (see https://thanks.rust-lang.org)"
diff --git a/collector/compile-benchmarks/cargo/.gitignore b/collector/compile-benchmarks/cargo/.gitignore
new file mode 100644
index 000000000..df490beb2
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/.gitignore
@@ -0,0 +1,14 @@
+/target
+.cargo
+/config.stamp
+/Makefile
+/config.mk
+src/doc/build
+src/etc/*.pyc
+src/registry/target
+src/registry/Cargo.lock
+rustc
+__pycache__
+.idea/
+*.iml
+*.swp
diff --git a/collector/compile-benchmarks/cargo/.travis.yml b/collector/compile-benchmarks/cargo/.travis.yml
new file mode 100644
index 000000000..c2f7ca36c
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/.travis.yml
@@ -0,0 +1,60 @@
+language: rust
+rust: stable
+sudo: required
+dist: trusty
+
+git:
+  depth: 1
+
+cache:
+  directories:
+    - $HOME/.cargo/bin/
+
+matrix:
+  include:
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+    - env: TARGET=x86_64-apple-darwin
+           ALT=i686-apple-darwin
+      os: osx
+
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+      rust: beta
+
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+      rust: nightly
+      install:
+        - mdbook --help || cargo install mdbook --force
+      script:
+        - cargo test
+        - cargo doc --no-deps
+        - sh src/ci/dox.sh
+      after_success: |
+        [ $TRAVIS_BRANCH = master ] &&
+        [ $TRAVIS_PULL_REQUEST = false ] &&
+        [ $(uname -s) = Linux ] &&
+        pip install ghp-import --user &&
+        $HOME/.local/bin/ghp-import -n target/doc &&
+        git push -qf https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages 2>&1 >/dev/null
+
+  exclude:
+    - rust: stable
+
+before_script:
+  - rustup target add $ALT
+script:
+  - cargo test
+
+env:
+  global:
+    - secure: "hWheSLilMM4DXChfSy2XsDlLw338X2o+fw8bE590xxU2TzngFW8GUfq7lGfZEp/l4SNNIS6ROU/igyttCZtxZMANZ4aMQZR5E8Fp4yPOyE1pZLDH/LdQVXnROsfburQJeq+GIYIbZ01Abzh5ClpgLg5KX0H627uj063zZ7Ljo/w="
+
+notifications:
+  email:
+    on_success: never
+addons:
+  apt:
+    packages:
+    - gcc-multilib
diff --git a/collector/compile-benchmarks/cargo/0-println.patch b/collector/compile-benchmarks/cargo/0-println.patch
new file mode 100644
index 000000000..d3cff140e
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/0-println.patch
@@ -0,0 +1,13 @@
+diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs
+index f20118b8..350d8e47 100755
+--- a/src/cargo/lib.rs
++++ b/src/cargo/lib.rs
+@@ -98,6 +98,8 @@ impl fmt::Display for VersionInfo {
+         }
+     };
+ 
++    println!("testing");
++
+     if let Some(ref cfg) = self.cfg_info {
+         if let Some(ref ci) = cfg.commit_info {
+             write!(f, " ({} {})",
diff --git a/collector/compile-benchmarks/cargo/ARCHITECTURE.md b/collector/compile-benchmarks/cargo/ARCHITECTURE.md
new file mode 100644
index 000000000..11e3e1218
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/ARCHITECTURE.md
@@ -0,0 +1,90 @@
+# Cargo Architecture
+
+This document gives a high-level overview of Cargo internals. You may
+find it useful if you want to contribute to Cargo or if you are
+interested in the inner workings of Cargo.
+
+
+## Subcommands
+
+Cargo is organized as a set of subcommands. All subcommands live in
+the `src/bin` directory. However, only the `src/bin/cargo.rs` file
+produces an executable; the other files inside the `bin` directory are
+submodules. See `src/bin/cargo.rs` for how these subcommands get wired
+up with the main executable.
+
+A typical subcommand, such as `src/bin/build.rs`, parses command line
+options, reads the configuration files, discovers the Cargo project in
+the current directory and delegates the actual implementation to one
+of the functions in `src/cargo/ops/mod.rs`. This short file is a good
+place to find out about most of the things that Cargo can do.
+
+
+## Important Data Structures
+
+There are some important data structures which are used throughout
+Cargo.
+
+`Config` is available almost everywhere and holds "global"
+information, such as `CARGO_HOME` or configuration from
+`.cargo/config` files. The `shell` method of `Config` is the entry
+point for printing status messages and other info to the console.
+
+`Workspace` is the description of the workspace for the current
+working directory. Each workspace contains at least one
+`Package`. Each package corresponds to a single `Cargo.toml`, and may
+define several `Target`s, such as the library, binaries, integration
+tests or examples. Targets are crates (each target defines a crate
+root, like `src/lib.rs` or `examples/foo.rs`) and are what is actually
+compiled by `rustc`.
+
+A typical package defines a single library target and several
+auxiliary ones. Packages are a unit of dependency in Cargo, and when
+package `foo` depends on package `bar`, that means that each target
+from `foo` needs the library target from `bar`.
+
+`PackageId` is the unique identifier of a (possibly remote)
+package. It consists of three components: name, version and source
+id. The source is the place where the source code for a package comes
+from. Typical sources are crates.io, a git repository or a folder on
+the local hard drive.
+
+`Resolve` is the representation of a directed acyclic graph of package
+dependencies, which uses `PackageId`s for nodes. This is the data
+structure that is saved to the lock file. If there is no lock file,
+Cargo constructs a resolve by finding a graph of packages which
+matches the declared dependency specifications according to semver.
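+
+As a rough sketch, the relationship between these types can be pictured
+as follows. This is a simplified, illustrative model only; the real
+definitions live under `src/cargo/core` and carry much more detail.
+
+```rust
+use std::collections::HashMap;
+use std::path::PathBuf;
+
+/// Where the source code for a package comes from (simplified).
+#[derive(PartialEq, Eq, Hash)]
+enum SourceId {
+    CratesIo,
+    Git { url: String },
+    LocalDirectory { path: PathBuf },
+}
+
+/// Unique identifier of a (possibly remote) package.
+#[derive(PartialEq, Eq, Hash)]
+struct PackageId {
+    name: String,
+    version: String, // a semver version, e.g. "0.23.0"
+    source: SourceId,
+}
+
+/// A directed acyclic graph of package dependencies, keyed by
+/// `PackageId`; this is the information that `Cargo.lock` records.
+struct Resolve {
+    graph: HashMap<PackageId, Vec<PackageId>>,
+}
+```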
"hWheSLilMM4DXChfSy2XsDlLw338X2o+fw8bE590xxU2TzngFW8GUfq7lGfZEp/l4SNNIS6ROU/igyttCZtxZMANZ4aMQZR5E8Fp4yPOyE1pZLDH/LdQVXnROsfburQJeq+GIYIbZ01Abzh5ClpgLg5KX0H627uj063zZ7Ljo/w=" + +notifications: + email: + on_success: never +addons: + apt: + packages: + - gcc-multilib diff --git a/collector/compile-benchmarks/cargo/0-println.patch b/collector/compile-benchmarks/cargo/0-println.patch new file mode 100644 index 000000000..d3cff140e --- /dev/null +++ b/collector/compile-benchmarks/cargo/0-println.patch @@ -0,0 +1,13 @@ +diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs +index f20118b8..350d8e47 100755 +--- a/src/cargo/lib.rs ++++ b/src/cargo/lib.rs +@@ -98,6 +98,8 @@ impl fmt::Display for VersionInfo { + } + }; + ++ println!("testing"); ++ + if let Some(ref cfg) = self.cfg_info { + if let Some(ref ci) = cfg.commit_info { + write!(f, " ({} {})", diff --git a/collector/compile-benchmarks/cargo/ARCHITECTURE.md b/collector/compile-benchmarks/cargo/ARCHITECTURE.md new file mode 100644 index 000000000..11e3e1218 --- /dev/null +++ b/collector/compile-benchmarks/cargo/ARCHITECTURE.md @@ -0,0 +1,90 @@ +# Cargo Architecture + +This document gives a high level overview of Cargo internals. You may +find it useful if you want to contribute to Cargo or if you are +interested in the inner workings of Cargo. + + +## Subcommands + +Cargo is organized as a set of subcommands. All subcommands live in +`src/bin` directory. However, only `src/bin/cargo.rs` file produces an +executable, other files inside the `bin` directory are submodules. See +`src/bin/cargo.rs` for how these subcommands get wired up with the +main executable. + +A typical subcommand, such as `src/bin/build.rs`, parses command line +options, reads the configuration files, discovers the Cargo project in +the current directory and delegates the actual implementation to one +of the functions in `src/cargo/ops/mod.rs`. This short file is a good +place to find out about most of the things that Cargo can do. + + +## Important Data Structures + +There are some important data structures which are used throughout +Cargo. + +`Config` is available almost everywhere and holds "global" +information, such as `CARGO_HOME` or configuration from +`.cargo/config` files. The `shell` method of `Config` is the entry +point for printing status messages and other info to the console. + +`Workspace` is the description of the workspace for the current +working directory. Each workspace contains at least one +`Package`. Each package corresponds to a single `Cargo.toml`, and may +define several `Target`s, such as the library, binaries, integration +test or examples. Targets are crates (each target defines a crate +root, like `src/lib.rs` or `examples/foo.rs`) and are what is actually +compiled by `rustc`. + +A typical package defines the single library target and several +auxiliary ones. Packages are a unit of dependency in Cargo, and when +package `foo` depends on package `bar`, that means that each target +from `foo` needs the library target from `bar`. + +`PackageId` is the unique identifier of a (possibly remote) +package. It consist of three components: name, version and source +id. Source is the place where the source code for package comes +from. Typical sources are crates.io, a git repository or a folder on +the local hard drive. + +`Resolve` is the representation of a directed acyclic graph of package +dependencies, which uses `PackageId`s for nodes. This is the data +structure that is saved to the lock file. 
+
+
+## Tests
+
+Cargo has an impressive test suite located in the `tests` folder. Most
+of the tests are integration tests: a project structure with a
+`Cargo.toml` and Rust source code is created in a temporary directory,
+the `cargo` binary is invoked via `std::process::Command` and then
+stdout and stderr are verified against the expected output. To
+simplify testing, several macros of the form `[MACRO]` are used in the
+expected output. For example, `[..]` matches any string and `[/]`
+matches `/` on Unixes and `\` on Windows.
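+
+The idea behind the `[..]` macro can be sketched like this. It is a
+simplified stand-in for the matcher in the internal `cargotest` crate,
+written here only to illustrate the technique:
+
+```rust
+/// Does `actual` match `template`, where each `[..]` in the template
+/// matches any (possibly empty) substring?
+fn line_matches(template: &str, actual: &str) -> bool {
+    let parts: Vec<&str> = template.split("[..]").collect();
+    if parts.len() == 1 {
+        // No wildcard: require an exact match.
+        return template == actual;
+    }
+    // The first fragment is anchored at the start of the line...
+    if !actual.starts_with(parts[0]) {
+        return false;
+    }
+    // ...and the last fragment at the end; the two must not overlap.
+    let last = parts[parts.len() - 1];
+    if !actual.ends_with(last) || actual.len() < parts[0].len() + last.len() {
+        return false;
+    }
+    // Fragments in between just have to appear in order.
+    let mut rest = &actual[parts[0].len()..actual.len() - last.len()];
+    for part in &parts[1..parts.len() - 1] {
+        match rest.find(part) {
+            Some(i) => rest = &rest[i + part.len()..],
+            None => return false,
+        }
+    }
+    true
+}
+
+fn main() {
+    assert!(line_matches("foo v0.0.1 ([..])",
+                         "foo v0.0.1 (file:///tmp/foo)"));
+}
+```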
diff --git a/collector/compile-benchmarks/cargo/CONTRIBUTING.md b/collector/compile-benchmarks/cargo/CONTRIBUTING.md
new file mode 100644
index 000000000..eb7374ee6
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/CONTRIBUTING.md
@@ -0,0 +1,173 @@
+# Contributing to Cargo
+
+Thank you for your interest in contributing to Cargo! Good places to
+start are this document, [ARCHITECTURE.md](ARCHITECTURE.md), which
+describes the high-level structure of Cargo, and [E-easy] bugs on the
+issue tracker.
+
+If you have a general question about Cargo or its internals, feel free
+to ask on [IRC].
+
+## Code of Conduct
+
+All contributors are expected to follow our [Code of Conduct].
+
+## Bug reports
+
+We can't fix what we don't know about, so please report problems liberally. This
+includes problems with understanding the documentation, unhelpful error messages
+and unexpected behavior.
+
+**If you think that you have identified an issue with Cargo that might compromise
+its users' security, please do not open a public issue on GitHub. Instead,
+we ask you to refer to Rust's [security policy].**
+
+Opening an issue is as easy as following [this
+link][new-issues] and filling out the fields.
+Here's a template that you can use to file an issue, though it's not necessary to
+use it exactly:
+
+    <short summary of the bug>
+
+    I tried this:
+
+    <code sample that causes the bug>
+
+    I expected to see this happen: <explanation>
+
+    Instead, this happened: <explanation>
+
+    I'm using <output of `cargo --version`>
+
+All three components are important: what you did, what you expected, what
+happened instead. Please use https://gist.github.com/ if your examples run long.
+
+## Working on issues
+
+If you're looking for somewhere to start, check out the [E-easy] tag.
+
+Feel free to ask for guidelines on how to tackle a problem on [IRC] or open a
+[new issue][new-issues]. This is especially important if you want to add new
+features to Cargo or make large changes to the existing code base.
+Cargo's core developers will do their best to provide help.
+
+If you start working on an already-filed issue, post a comment on it to
+let people know that somebody is working on it. Feel free to ask for comments if
+you are unsure about the solution you would like to submit.
+
+While Cargo does make use of some Rust features available only through the
+`nightly` toolchain, it must compile on stable Rust. Code added to Cargo
+is encouraged to make use of the latest stable features of the language and
+`stdlib`.
+
+We use the "fork and pull" model [described here][development-models], where
+contributors push changes to their personal fork and create pull requests to
+bring those changes into the source repository. This process is partly
+automated: pull requests are made against Cargo's master branch, tested and
+reviewed. Once a change is approved to be merged, a friendly bot merges the
+changes into an internal branch, runs the full test suite on that branch
+and only then merges into master. This ensures that Cargo's master branch
+passes the test suite at all times.
+
+Your basic steps to get going:
+
+* Fork Cargo and create a branch from master for the issue you are working on.
+* Please adhere to the code style that you see around the location you are
+working on.
+* [Commit as you go][githelp].
+* Include tests that cover all non-trivial code. The existing tests
+in `tests/` provide templates on how to test Cargo's behavior in a
+sandbox environment. The internal crate `cargotest` provides many
+helpers to minimize boilerplate.
+* Make sure `cargo test` passes. If you do not have the cross-compilers
+installed locally, ignore the cross-compile test failures or disable them by
+using `CFG_DISABLE_CROSS_TESTS=1 cargo test`. Note that some tests are enabled
+only on the `nightly` toolchain. If you can, test both toolchains.
+* Push your commits to GitHub and create a pull request against Cargo's
+`master` branch.
+
+## Pull requests
+
+After the pull request is made, a friendly bot will automatically assign a
+reviewer; the review process will make sure that the proposed changes are
+sound. Please give the assigned reviewer sufficient time, especially during
+weekends. If you don't get a reply, you may poke the core developers on [IRC].
+
+A merge of Cargo's master branch and your changes is immediately queued
+to be tested after the pull request is made. In case unforeseen
+problems are discovered during this step (e.g. a failure on a platform you
+originally did not develop on), you may ask for guidance. Push additional
+commits to your branch to tackle these problems.
+
+The reviewer might point out changes deemed necessary. Please add them as
+extra commits; this ensures that the reviewer can see what has changed since
+the code was previously reviewed. Large or tricky changes may require several
+passes of review and changes.
+
+Once the reviewer approves your pull request, a friendly bot picks it up
+and [merges][mergequeue] it into Cargo's `master` branch.
+
+## Contributing to the documentation
+
+To contribute to the documentation, all you need to do is change the markdown
+files in the `src/doc` directory. To view the rendered version of changes you
+have made locally, run:
+
+```sh
+sh src/ci/dox.sh
+open target/doc/index.html
+```
+
+
+## Issue Triage
+
+Sometimes an issue will stay open, even though the bug has been fixed. And
+sometimes, the original bug may go stale because something has changed in the
+meantime.
+
+It can be helpful to go through older bug reports and make sure that they are
+still valid. Load up an older issue, double check that it's still true, and
+leave a comment letting us know if it is or is not. The [least recently
+updated sort][lru] is good for finding issues like this.
+
+Contributors with sufficient permissions on the Rust repository can help by
+adding labels to triage issues:
+
+* Yellow, **A**-prefixed labels state which **area** of the project an issue
+  relates to.
+
+* Magenta, **B**-prefixed labels identify bugs which are **blockers**.
+
+* Light purple, **C**-prefixed labels represent the **category** of an issue.
+
+* Dark purple, **Command**-prefixed labels mean the issue has to do with a
+  specific cargo command.
+
+* Green, **E**-prefixed labels explain the level of **experience** or
+  **effort** necessary to fix the issue. [**E-mentor**][E-mentor] issues also
+  have some instructions on how to get started.
+
+* Red, **I**-prefixed labels indicate the **importance** of the issue. The
+  [I-nominated][inom] label indicates that an issue has been nominated for
+  prioritizing at the next triage meeting.
+
+* Purple gray, **O**-prefixed labels are the **operating system** or platform
+  that this issue is specific to.
+
+* Orange, **P**-prefixed labels indicate a bug's **priority**. These labels
+  are only assigned during triage meetings and replace the [I-nominated][inom]
+  label.
+
+* The light orange **relnotes** label marks issues that should be documented in
+  the release notes of the next release.
+
+
+[githelp]: https://dont-be-afraid-to-commit.readthedocs.io/en/latest/git/commandlinegit.html
+[development-models]: https://help.github.com/articles/about-collaborative-development-models/
+[gist]: https://gist.github.com/
+[new-issues]: https://github.com/rust-lang/cargo/issues/new
+[mergequeue]: https://buildbot2.rust-lang.org/homu/queue/cargo
+[security policy]: https://www.rust-lang.org/security.html
+[lru]: https://github.com/rust-lang/cargo/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-asc
+[inom]: https://github.com/rust-lang/cargo/labels/I-nominated
+[E-easy]: https://github.com/rust-lang/cargo/labels/E-easy
+[E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor
+[Code of Conduct]: https://www.rust-lang.org/conduct.html
+[IRC]: https://kiwiirc.com/client/irc.mozilla.org/cargo
diff --git a/collector/compile-benchmarks/cargo/Cargo.lock b/collector/compile-benchmarks/cargo/Cargo.lock
new file mode 100644
index 000000000..714bd6e1d
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/Cargo.lock
@@ -0,0 +1,1185 @@
+[root]
+name = "cargo"
+version = "0.23.0"
+dependencies = [
+ "atty 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargotest 0.1.0",
+ "core-foundation 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crates-io 0.12.0",
+ "crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crypto-hash 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "error-chain 0.11.0
(registry+https://github.com/rust-lang/crates.io-index)", + "filetime 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "fs2 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", + "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "home 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ignore 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "semver 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "termcolor 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "advapi32-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "aho-corasick" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "memchr 0.1.11 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "aho-corasick" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "atty" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "backtrace" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "backtrace-sys 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "backtrace-sys" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bitflags" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bitflags" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bufstream" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "cargotest" +version = "0.1.0" +dependencies = [ + "cargo 0.23.0", + "filetime 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", + "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "cc" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "cfg-if" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "cmake" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "commoncrypto" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "commoncrypto-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "commoncrypto-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "conv" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "core-foundation" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "core-foundation-sys 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "core-foundation-sys" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crates-io" +version = "0.12.0" +dependencies = [ + "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "crossbeam" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "crypto-hash" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "curl" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "curl-sys 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 
(registry+https://github.com/rust-lang/crates.io-index)", + "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)", + "socket2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "curl-sys" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "libz-sys 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "custom_derive" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "dbghelp-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "docopt" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "dtoa" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "env_logger" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "error-chain" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "backtrace 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "filetime" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "flate2" +version = "0.2.20" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "miniz-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fnv" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "foreign-types" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "fs2" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "git2" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "git2-curl" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "glob" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "globset" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "hamcrest" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "hex" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "home" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+dependencies = [ + "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "userenv-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "idna" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ignore" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", + "globset 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "itoa" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "jobserver" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "lazy_static" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libc" +version = "0.2.31" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libgit2-sys" +version = "0.6.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + 
"libz-sys 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "libssh2-sys" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cmake 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "libz-sys 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "libz-sys" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "log" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "magenta" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "magenta-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "magenta-sys" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "matches" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "memchr" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "memchr" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "miniz-sys" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "miow" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "net2 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + 
"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "net2" +version = "0.2.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-bigint 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "num-complex 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", + "num-iter 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", + "num-rational 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-bigint" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-complex" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-integer" +version = "0.1.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-iter" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-rational" +version = "0.1.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-bigint 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-traits" +version = 
"0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "num_cpus" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "openssl" +version = "0.9.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", + "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "openssl-probe" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "openssl-sys" +version = "0.9.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "percent-encoding" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "pkg-config" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "psapi-sys" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "quote" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rand" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "magenta 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "redox_syscall" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "redox_termios" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "redox_syscall 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "regex" +version = "0.1.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.3.9 
(registry+https://github.com/rust-lang/crates.io-index)", + "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "regex" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "regex-syntax" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "regex-syntax" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rustc-demangle" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rustc-serialize" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "same-file" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "scoped-tls" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "scopeguard" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "semver" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "serde" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "serde_derive" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive_internals 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_derive_internals" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", + "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_ignored" 
+version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_json" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "shell-escape" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "socket2" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "strsim" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "syn" +version = "0.11.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "synom" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tar" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "filetime 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tempdir" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "termcolor" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "wincolor 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "termion" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_termios 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "thread-id" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "thread_local" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "thread_local" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "toml" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "unicode-normalization" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unicode-xid" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unreachable" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "url" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "userenv-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "utf8-ranges" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "utf8-ranges" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "vcpkg" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "void" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "walkdir" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + 
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "wincolor" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ws2_32-sys" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[metadata] +"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a" +"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66" +"checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699" +"checksum atty 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "21e50800ec991574876040fff8ee46b136a53e985286fbe6a3bdfe6421b78860" +"checksum backtrace 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "99f2ce94e22b8e664d95c57fff45b98a966c2252b60691d0b7aeeccd88d70983" +"checksum backtrace-sys 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "c63ea141ef8fdb10409d0f5daf30ac51f84ef43bff66f16627773d2a292cd189" +"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" +"checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5" +"checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32" +"checksum cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7db2f146208d7e0fbee761b09cd65a7f51ccc38705d4e7262dad4d73b12a76b1" +"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de" +"checksum cmake 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)" = "357c07e7a1fc95732793c1edb5901e1a1f305cfcf63a90eb12dbd22bdb6b789d" +"checksum commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007" +"checksum commoncrypto-sys 0.2.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "1fed34f46747aa73dfaa578069fd8279d2818ade2b55f38f22a9401c7f4083e2" +"checksum conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "78ff10625fd0ac447827aa30ea8b861fead473bb60aeb73af6c1c58caf0d1299" +"checksum core-foundation 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5909502e547762013619f4c4e01cc7393c20fe2d52d7fa471c1210adb2320dc7" +"checksum core-foundation-sys 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bc9fb3d6cb663e6fd7cf1c63f9b144ee2b1e4a78595a0451dd34bff85b9a3387" +"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97" +"checksum crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8837ab96533202c5b610ed44bc7f4183e7957c1c8f56e8cc78bb098593c8ba0a" +"checksum crypto-hash 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "34903878eec1694faf53cae8473a088df333181de421d4d3d48061d6559fe602" +"checksum curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7034c534a1d7d22f7971d6088aa9d281d219ef724026c3428092500f41ae9c2c" +"checksum curl-sys 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "4bee31aa3a079d5f3ff9579ea4dcfb1b1a17a40886f5f467436d383e78134b55" +"checksum custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ef8ae57c4978a2acd8b869ce6b9ca1dfe817bff704c220209fdef2c0b75a01b9" +"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850" +"checksum docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3b5b93718f8b3e5544fcc914c43de828ca6c6ace23e0332c6080a2977b49787a" +"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" +"checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" +"checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3" +"checksum filetime 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "6ab199bf38537c6f38792669e081e0bb278b9b7405bba2642e4e5d15bf732c0e" +"checksum flate2 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)" = "e6234dd4468ae5d1e2dbb06fe2b058696fdc50a339c68a393aefbf00bc81e423" +"checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344" +"checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d" +"checksum fs2 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ab76cfd2aaa59b7bf6688ad9ba15bbae64bff97f04ea02144cfd3443e5c2866" +"checksum git2 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0c1c0203d653f4140241da0c1375a404f0a397249ec818cd2076c6280c50f6fa" +"checksum 
git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e" +"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" +"checksum globset 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "feeb1b6840809ef5efcf7a4a990bc4e1b7ee3df8cf9e2379a75aeb2ba42ac9c3" +"checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4" +"checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa" +"checksum home 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f25ae61099d8f3fee8b483df0bd4ecccf4b2731897aad40d50eca1b641fe6db" +"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d" +"checksum ignore 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b3fcaf2365eb14b28ec7603c98c06cc531f19de9eb283d89a3dff8417c8c99f5" +"checksum itoa 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8324a32baf01e2ae060e9de58ed0bc2320c9a2833491ee36cd3b4c414de4db8c" +"checksum jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "443ae8bc0af6c106e6e8b77e04684faecc1a5ce94e058f4c2b0a037b0ea1b133" +"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c9e5e58fa1a4c3b915a561a78a22ee0cac6ab97dca2504428bc1cb074375f8d5" +"checksum libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)" = "d1419b2939a0bc44b77feb34661583c7546b532b192feab36249ab584b86856c" +"checksum libgit2-sys 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)" = "6f74b4959cef96898f5123148724fc7dee043b9a6b99f219d948851bfbe53cb2" +"checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75" +"checksum libz-sys 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "44ebbc760fd2d2f4d93de09a0e13d97e057612052e871da9985cedcb451e6bd5" +"checksum log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "880f77541efa6e5cc74e76910c9884d9859683118839d6a1dc3b11e63512565b" +"checksum magenta 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf0336886480e671965f794bc9b6fce88503563013d1bfb7a502c81fe3ac527" +"checksum magenta-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40d014c7011ac470ae28e2f76a02bfea4a8480f73e701353b49ad7a8d75f4699" +"checksum matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "100aabe6b8ff4e4a7e32c1c13523379802df0772b82466207ac25b013f193376" +"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" +"checksum memchr 1.0.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4" +"checksum miniz-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "609ce024854aeb19a0ef7567d348aaa5a746b32fb72e336df7fcc16869d7e2b4" +"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" +"checksum net2 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)" = "3a80f842784ef6c9a958b68b7516bc7e35883c614004dd94959a4dca1b716c09" +"checksum num 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "a311b77ebdc5dd4cf6449d81e4135d9f0e3b153839ac90e648a8ef538f923525" +"checksum num-bigint 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "8fd0f8dbb4c0960998958a796281d88c16fbe68d87b1baa6f31e2979e81fd0bd" +"checksum num-complex 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "503e668405c5492d67cf662a81e05be40efe2e6bcf10f7794a07bd9865e704e6" +"checksum num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "d1452e8b06e448a07f0e6ebb0bb1d92b8890eea63288c0b627331d53514d0fba" +"checksum num-iter 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "7485fcc84f85b4ecd0ea527b14189281cf27d60e583ae65ebc9c088b13dffe01" +"checksum num-rational 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "288629c76fac4b33556f4b7ab57ba21ae202da65ba8b77466e6d598e31990790" +"checksum num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "99843c856d68d8b4313b03a17e33c4bb42ae8f6610ea81b28abe076ac721b9b0" +"checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d" +"checksum openssl 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)" = "816914b22eb15671d62c73442a51978f311e911d6a6f6cbdafa6abce1b5038fc" +"checksum openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d98df0270d404ccd3c050a41d579c52d1db15375168bb3471e04ec0f5f378daf" +"checksum openssl-sys 0.9.19 (registry+https://github.com/rust-lang/crates.io-index)" = "1e4c63a7d559c1e5afa6d6a9e6fa34bbc5f800ffc9ae08b72c605420b0c4f5e8" +"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356" +"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903" +"checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478" +"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" +"checksum rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "eb250fd207a4729c976794d03db689c9be1d634ab5a1c9da9492a13d8fecbcdf" +"checksum redox_syscall 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "8dde11f18c108289bef24469638a04dce49da56084f2d50618b226e47eb04509" 
+"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" +"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f" +"checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b" +"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957" +"checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db" +"checksum rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "aee45432acc62f7b9a108cc054142dac51f979e69e71ddce7d6fc7adf29e817e" +"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" +"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7" +"checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d" +"checksum scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57" +"checksum semver 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bee2bc909ab2d8d60dab26e8cad85b25d795b14603a0dcb627b78b9d30b6454b" +"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" +"checksum serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "6a7046c9d4c6c522d10b2d098f9bebe2bef227e0e74044d8c1bfcf6b476af799" +"checksum serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "1afcaae083fd1c46952a315062326bc9957f182358eb7da03b57ef1c688f7aa9" +"checksum serde_derive_internals 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bd381f6d01a6616cdba8530492d453b7761b456ba974e98768a18cad2cd76f58" +"checksum serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "190e9765dcedb56be63b6e0993a006c7e3b071a016a304736e4a315dc01fb142" +"checksum serde_json 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d243424e06f9f9c39e3cd36147470fd340db785825e367625f79298a6ac6b7ac" +"checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8" +"checksum socket2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9e76b159741052c7deaa9fd0b5ca6b5f79cecf525ed665abfe5002086c6b2791" +"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694" +"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = 
"d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" +"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" +"checksum tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "281285b717926caa919ad905ef89c63d75805c7d89437fb873100925a53f2b1b" +"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6" +"checksum termcolor 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9065bced9c3e43453aa3d56f1e98590b8455b341d2fa191a1090c0dd0b242c75" +"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096" +"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03" +"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5" +"checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14" +"checksum toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a7540f4ffc193e0d3c94121edb19b055670d369f77d5804db11ae053a45b6e7e" +"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +"checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f" +"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" +"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" +"checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27" +"checksum userenv-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71d28ea36bbd9192d75bd9fa9b39f96ddb986eaee824adae5d53b6e51919b2f3" +"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f" +"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" +"checksum vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9e0a7d8bed3178a8fb112199d466eeca9ed09a14ba8ad67718179b4fd5487d0b" +"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +"checksum walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "bb08f9e670fab86099470b97cd2b252d6527f0b3cc1401acdb595ffc9dd288ff" +"checksum winapi 0.2.8 
(registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" +"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" +"checksum wincolor 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a39ee4464208f6430992ff20154216ab2357772ac871d994c51628d60e58b8b0" +"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" diff --git a/collector/compile-benchmarks/cargo/Cargo.toml b/collector/compile-benchmarks/cargo/Cargo.toml new file mode 100644 index 000000000..013558386 --- /dev/null +++ b/collector/compile-benchmarks/cargo/Cargo.toml @@ -0,0 +1,81 @@ +[package] +name = "cargo" +version = "0.23.0" +authors = ["Yehuda Katz ", + "Carl Lerche ", + "Alex Crichton "] +license = "MIT/Apache-2.0" +homepage = "https://crates.io" +repository = "https://github.com/rust-lang/cargo" +documentation = "https://docs.rs/cargo" +description = """ +Cargo, a package manager for Rust. +""" + +[lib] +name = "cargo" +path = "src/cargo/lib.rs" + +[dependencies] +atty = "0.2" +crates-io = { path = "src/crates-io", version = "0.12" } +crossbeam = "0.3" +crypto-hash = "0.3" +curl = "0.4.6" +docopt = "0.8.1" +env_logger = "0.4" +error-chain = "0.11.0-rc.2" +filetime = "0.1" +flate2 = "0.2" +fs2 = "0.4" +git2 = "0.6" +git2-curl = "0.7" +glob = "0.2" +hex = "0.2" +home = "0.3" +ignore = "^0.2.2" +jobserver = "0.1.6" +libc = "=0.2.54" +libgit2-sys = "0.6" +log = "0.3" +num_cpus = "1.0" +same-file = "0.1" +scoped-tls = "0.1" +semver = { version = "0.8.0", features = ["serde"] } +serde = "1.0" +serde_derive = "1.0" +serde_ignored = "0.0.4" +serde_json = "1.0" +shell-escape = "0.1" +tar = { version = "0.4", default-features = false } +tempdir = "0.3" +termcolor = "0.3" +toml = "0.4" +#url = "1.1" +url = { path = "url-1.5.1" } + +[target.'cfg(target_os = "macos")'.dependencies] +core-foundation = { version = "0.4.4", features = ["mac_os_10_7_support"] } + +[target.'cfg(windows)'.dependencies] +kernel32-sys = "0.2" +miow = "0.2" +psapi-sys = "0.1" +winapi = "0.2" + +[dev-dependencies] +bufstream = "0.1" +cargotest = { path = "tests/cargotest" } +filetime = "0.1" +hamcrest = "=0.1.1" + +[[bin]] +name = "cargo" +test = false +doc = false + +[workspace] + +[patch.crates-io] +url = { path = "url-1.5.1" } +socket2 = { path = "socket2-0.2.3" } diff --git a/collector/compile-benchmarks/cargo/LICENSE-APACHE b/collector/compile-benchmarks/cargo/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/collector/compile-benchmarks/cargo/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/collector/compile-benchmarks/cargo/LICENSE-MIT b/collector/compile-benchmarks/cargo/LICENSE-MIT new file mode 100644 index 000000000..31aa79387 --- /dev/null +++ b/collector/compile-benchmarks/cargo/LICENSE-MIT @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/collector/compile-benchmarks/cargo/LICENSE-THIRD-PARTY b/collector/compile-benchmarks/cargo/LICENSE-THIRD-PARTY new file mode 100644 index 000000000..c9897b96f --- /dev/null +++ b/collector/compile-benchmarks/cargo/LICENSE-THIRD-PARTY @@ -0,0 +1,1272 @@ +The Cargo source code itself does not bundle any third party libraries, but it +depends on a number of libraries which carry their own copyright notices and +license terms. These libraries are normally all linked static into the binary +distributions of Cargo: + +* OpenSSL - http://www.openssl.org/source/license.html + + Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + 3. All advertising materials mentioning features or use of this + software must display the following acknowledgment: + "This product includes software developed by the OpenSSL Project + for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + + 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + endorse or promote products derived from this software without + prior written permission. For written permission, please contact + openssl-core@openssl.org. + + 5. Products derived from this software may not be called "OpenSSL" + nor may "OpenSSL" appear in their names without prior written + permission of the OpenSSL Project. + + 6. 
Redistributions of any form whatsoever must retain the following + acknowledgment: + "This product includes software developed by the OpenSSL Project + for use in the OpenSSL Toolkit (http://www.openssl.org/)" + + THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR + ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + OF THE POSSIBILITY OF SUCH DAMAGE. + ==================================================================== + + This product includes cryptographic software written by Eric Young + (eay@cryptsoft.com). This product includes software written by Tim + Hudson (tjh@cryptsoft.com). + + --- + + Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + All rights reserved. + + This package is an SSL implementation written + by Eric Young (eay@cryptsoft.com). + The implementation was written so as to conform with Netscapes SSL. + + This library is free for commercial and non-commercial use as long as + the following conditions are aheared to. The following conditions + apply to all code found in this distribution, be it the RC4, RSA, + lhash, DES, etc., code; not just the SSL code. The SSL documentation + included with this distribution is covered by the same copyright terms + except that the holder is Tim Hudson (tjh@cryptsoft.com). + + Copyright remains Eric Young's, and as such any Copyright notices in + the code are not to be removed. + If this package is used in a product, Eric Young should be given attribution + as the author of the parts of the library used. + This can be in the form of a textual message at program startup or + in documentation (online or textual) provided with the package. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. All advertising materials mentioning features or use of this software + must display the following acknowledgement: + "This product includes cryptographic software written by + Eric Young (eay@cryptsoft.com)" + The word 'cryptographic' can be left out if the rouines from the library + being used are not cryptographic related :-). + 4. If you include any Windows specific code (or a derivative thereof) from + the apps directory (application code) you must include an acknowledgement: + "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + + THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + The licence and distribution terms for any publically available version or + derivative of this code cannot be changed. i.e. this code cannot simply be + copied and put under another distribution licence + [including the GNU Public Licence.] + +* libgit2 - https://github.com/libgit2/libgit2/blob/master/COPYING + + libgit2 is Copyright (C) the libgit2 contributors, + unless otherwise stated. See the AUTHORS file for details. + + Note that the only valid version of the GPL as far as this project + is concerned is _this_ particular version of the license (ie v2, not + v2.2 or v3.x or whatever), unless explicitly otherwise stated. + + ---------------------------------------------------------------------- + + LINKING EXCEPTION + + In addition to the permissions in the GNU General Public License, + the authors give you unlimited permission to link the compiled + version of this library into combinations with other programs, + and to distribute those combinations without any restriction + coming from the use of this file. (The General Public License + restrictions do apply in other respects; for example, they cover + modification of the file, and distribution when not linked into + a combined executable.) + + ---------------------------------------------------------------------- + + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc. + 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your + freedom to share and change it. By contrast, the GNU General Public + License is intended to guarantee your freedom to share and change free + software--to make sure the software is free for all its users. This + General Public License applies to most of the Free Software + Foundation's software and to any other program whose authors commit to + using it. (Some other Free Software Foundation software is covered by + the GNU Library General Public License instead.) You can apply it to + your programs, too. + + When we speak of free software, we are referring to freedom, not + price. Our General Public Licenses are designed to make sure that you + have the freedom to distribute copies of free software (and charge for + this service if you wish), that you receive source code or can get it + if you want it, that you can change the software or use pieces of it + in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid + anyone to deny you these rights or to ask you to surrender the rights. + These restrictions translate to certain responsibilities for you if you + distribute copies of the software, or if you modify it. 
+ + For example, if you distribute copies of such a program, whether + gratis or for a fee, you must give the recipients all the rights that + you have. You must make sure that they, too, receive or can get the + source code. And you must show them these terms so they know their + rights. + + We protect your rights with two steps: (1) copyright the software, and + (2) offer you this license which gives you legal permission to copy, + distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain + that everyone understands that there is no warranty for this free + software. If the software is modified by someone else and passed on, we + want its recipients to know that what they have is not the original, so + that any problems introduced by others will not reflect on the original + authors' reputations. + + Finally, any free program is threatened constantly by software + patents. We wish to avoid the danger that redistributors of a free + program will individually obtain patent licenses, in effect making the + program proprietary. To prevent this, we have made it clear that any + patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and + modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains + a notice placed by the copyright holder saying it may be distributed + under the terms of this General Public License. The "Program", below, + refers to any such program or work, and a "work based on the Program" + means either the Program or any derivative work under copyright law: + that is to say, a work containing the Program or a portion of it, + either verbatim or with modifications and/or translated into another + language. (Hereinafter, translation is included without limitation in + the term "modification".) Each licensee is addressed as "you". + + Activities other than copying, distribution and modification are not + covered by this License; they are outside its scope. The act of + running the Program is not restricted, and the output from the Program + is covered only if its contents constitute a work based on the + Program (independent of having been made by running the Program). + Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's + source code as you receive it, in any medium, provided that you + conspicuously and appropriately publish on each copy an appropriate + copyright notice and disclaimer of warranty; keep intact all the + notices that refer to this License and to the absence of any warranty; + and give any other recipients of the Program a copy of this License + along with the Program. + + You may charge a fee for the physical act of transferring a copy, and + you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion + of it, thus forming a work based on the Program, and copy and + distribute such modifications or work under the terms of Section 1 + above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. 
+ + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + + These requirements apply to the modified work as a whole. If + identifiable sections of that work are not derived from the Program, + and can be reasonably considered independent and separate works in + themselves, then this License, and its terms, do not apply to those + sections when you distribute them as separate works. But when you + distribute the same sections as part of a whole which is a work based + on the Program, the distribution of the whole must be on the terms of + this License, whose permissions for other licensees extend to the + entire whole, and thus to each and every part regardless of who wrote it. + + Thus, it is not the intent of this section to claim rights or contest + your rights to work written entirely by you; rather, the intent is to + exercise the right to control the distribution of derivative or + collective works based on the Program. + + In addition, mere aggregation of another work not based on the Program + with the Program (or with a work based on the Program) on a volume of + a storage or distribution medium does not bring the other work under + the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, + under Section 2) in object code or executable form under the terms of + Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + + The source code for a work means the preferred form of the work for + making modifications to it. For an executable work, complete source + code means all the source code for all modules it contains, plus any + associated interface definition files, plus the scripts used to + control compilation and installation of the executable. 
However, as a + special exception, the source code distributed need not include + anything that is normally distributed (in either source or binary + form) with the major components (compiler, kernel, and so on) of the + operating system on which the executable runs, unless that component + itself accompanies the executable. + + If distribution of executable or object code is made by offering + access to copy from a designated place, then offering equivalent + access to copy the source code from the same place counts as + distribution of the source code, even though third parties are not + compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program + except as expressly provided under this License. Any attempt + otherwise to copy, modify, sublicense or distribute the Program is + void, and will automatically terminate your rights under this License. + However, parties who have received copies, or rights, from you under + this License will not have their licenses terminated so long as such + parties remain in full compliance. + + 5. You are not required to accept this License, since you have not + signed it. However, nothing else grants you permission to modify or + distribute the Program or its derivative works. These actions are + prohibited by law if you do not accept this License. Therefore, by + modifying or distributing the Program (or any work based on the + Program), you indicate your acceptance of this License to do so, and + all its terms and conditions for copying, distributing or modifying + the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the + Program), the recipient automatically receives a license from the + original licensor to copy, distribute or modify the Program subject to + these terms and conditions. You may not impose any further + restrictions on the recipients' exercise of the rights granted herein. + You are not responsible for enforcing compliance by third parties to + this License. + + 7. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot + distribute so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you + may not distribute the Program at all. For example, if a patent + license would not permit royalty-free redistribution of the Program by + all those who receive copies directly or indirectly through you, then + the only way you could satisfy both it and this License would be to + refrain entirely from distribution of the Program. + + If any portion of this section is held invalid or unenforceable under + any particular circumstance, the balance of the section is intended to + apply and the section as a whole is intended to apply in other + circumstances. + + It is not the purpose of this section to induce you to infringe any + patents or other property right claims or to contest validity of any + such claims; this section has the sole purpose of protecting the + integrity of the free software distribution system, which is + implemented by public license practices. 
Many people have made + generous contributions to the wide range of software distributed + through that system in reliance on consistent application of that + system; it is up to the author/donor to decide if he or she is willing + to distribute software through any other system and a licensee cannot + impose that choice. + + This section is intended to make thoroughly clear what is believed to + be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in + certain countries either by patents or by copyrighted interfaces, the + original copyright holder who places the Program under this License + may add an explicit geographical distribution limitation excluding + those countries, so that distribution is permitted only in or among + countries not thus excluded. In such case, this License incorporates + the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions + of the General Public License from time to time. Such new versions will + be similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies a version number of this License which applies to it and "any + later version", you have the option of following the terms and conditions + either of that version or of any later version published by the Free + Software Foundation. If the Program does not specify a version number of + this License, you may choose any version ever published by the Free Software + Foundation. + + 10. If you wish to incorporate parts of the Program into other free + programs whose distribution conditions are different, write to the author + to ask for permission. For software which is copyrighted by the Free + Software Foundation, write to the Free Software Foundation; we sometimes + make exceptions for this. Our decision will be guided by the two goals + of preserving the free status of all derivatives of our free software and + of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY + FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN + OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES + PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED + OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS + TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE + PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, + REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR + REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, + INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING + OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED + TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY + YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest + possible use to the public, the best way to achieve this is to make it + free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest + to attach them to the start of each source file to most effectively + convey the exclusion of warranty; and each file should have at least + the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + + + Also add information on how to contact you by electronic and paper mail. + + If the program is interactive, make it output a short notice like this + when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + + The hypothetical commands `show w' and `show c' should show the appropriate + parts of the General Public License. Of course, the commands you use may + be called something other than `show w' and `show c'; they could even be + mouse-clicks or menu items--whatever suits your program. + + You should also get your employer (if you work as a programmer) or your + school, if any, to sign a "copyright disclaimer" for the program, if + necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + <signature of Ty Coon>, 1 April 1989 + Ty Coon, President of Vice + + This General Public License does not permit incorporating your program into + proprietary programs. If your program is a subroutine library, you may + consider it more useful to permit linking proprietary applications with the + library. If this is what you want to do, use the GNU Library General + Public License instead of this License. + + ---------------------------------------------------------------------- + + The bundled ZLib code is licensed under the ZLib license: + + Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software.
If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + + ---------------------------------------------------------------------- + + The Clar framework is licensed under the MIT license: + + Copyright (C) 2011 by Vicent Marti + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + ---------------------------------------------------------------------- + + The regex library (deps/regex/) is licensed under the GNU LGPL + + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + [This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your + freedom to share and change it. By contrast, the GNU General Public + Licenses are intended to guarantee your freedom to share and change + free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some + specially designated software packages--typically libraries--of the + Free Software Foundation and other authors who decide to use it. You + can use it too, but we suggest you first think carefully about whether + this license or the ordinary General Public License is the better + strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, + not price. Our General Public Licenses are designed to make sure that + you have the freedom to distribute copies of free software (and charge + for this service if you wish); that you receive source code or can get + it if you want it; that you can change the software and use pieces of + it in new free programs; and that you are informed that you can do + these things. 
+ + To protect your rights, we need to make restrictions that forbid + distributors to deny you these rights or to ask you to surrender these + rights. These restrictions translate to certain responsibilities for + you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis + or for a fee, you must give the recipients all the rights that we gave + you. You must make sure that they, too, receive or can get the source + code. If you link other code with the library, you must provide + complete object files to the recipients, so that they can relink them + with the library after making changes to the library and recompiling + it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the + library, and (2) we offer you this license, which gives you legal + permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that + there is no warranty for the free library. Also, if the library is + modified by someone else and passed on, the recipients should know + that what they have is not the original version, so that the original + author's reputation will not be affected by problems that might be + introduced by others. + + Finally, software patents pose a constant threat to the existence of + any free program. We wish to make sure that a company cannot + effectively restrict the users of a free program by obtaining a + restrictive license from a patent holder. Therefore, we insist that + any patent license obtained for a version of the library must be + consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the + ordinary GNU General Public License. This license, the GNU Lesser + General Public License, applies to certain designated libraries, and + is quite different from the ordinary General Public License. We use + this license for certain libraries in order to permit linking those + libraries into non-free programs. + + When a program is linked with a library, whether statically or using + a shared library, the combination of the two is legally speaking a + combined work, a derivative of the original library. The ordinary + General Public License therefore permits such linking only if the + entire combination fits its criteria of freedom. The Lesser General + Public License permits more lax criteria for linking other code with + the library. + + We call this license the "Lesser" General Public License because it + does Less to protect the user's freedom than the ordinary General + Public License. It also provides other free software developers Less + of an advantage over competing non-free programs. These disadvantages + are the reason we use the ordinary General Public License for many + libraries. However, the Lesser license provides advantages in certain + special circumstances. + + For example, on rare occasions, there may be a special need to + encourage the widest possible use of a certain library, so that it becomes + a de-facto standard. To achieve this, non-free programs must be + allowed to use the library. A more frequent case is that a free + library does the same job as widely used non-free libraries. In this + case, there is little to gain by limiting the free library to free + software only, so we use the Lesser General Public License. 
+ + In other cases, permission to use a particular library in non-free + programs enables a greater number of people to use a large body of + free software. For example, permission to use the GNU C Library in + non-free programs enables many more people to use the whole GNU + operating system, as well as its variant, the GNU/Linux operating + system. + + Although the Lesser General Public License is Less protective of the + users' freedom, it does ensure that the user of a program that is + linked with the Library has the freedom and the wherewithal to run + that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and + modification follow. Pay close attention to the difference between a + "work based on the library" and a "work that uses the library". The + former contains code derived from the library, whereas the latter must + be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other + program which contains a notice placed by the copyright holder or + other authorized party saying it may be distributed under the terms of + this Lesser General Public License (also called "this License"). + Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data + prepared so as to be conveniently linked with application programs + (which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work + which has been distributed under these terms. A "work based on the + Library" means either the Library or any derivative work under + copyright law: that is to say, a work containing the Library or a + portion of it, either verbatim or with modifications and/or translated + straightforwardly into another language. (Hereinafter, translation is + included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for + making modifications to it. For a library, complete source code means + all the source code for all modules it contains, plus any associated + interface definition files, plus the scripts used to control compilation + and installation of the library. + + Activities other than copying, distribution and modification are not + covered by this License; they are outside its scope. The act of + running a program using the Library is not restricted, and output from + such a program is covered only if its contents constitute a work based + on the Library (independent of the use of the Library in a tool for + writing it). Whether that is true depends on what the Library does + and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's + complete source code as you receive it, in any medium, provided that + you conspicuously and appropriately publish on each copy an + appropriate copyright notice and disclaimer of warranty; keep intact + all the notices that refer to this License and to the absence of any + warranty; and distribute a copy of this License along with the + Library. + + You may charge a fee for the physical act of transferring a copy, + and you may at your option offer warranty protection in exchange for a + fee. + + 2. 
You may modify your copy or copies of the Library or any portion + of it, thus forming a work based on the Library, and copy and + distribute such modifications or work under the terms of Section 1 + above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + + These requirements apply to the modified work as a whole. If + identifiable sections of that work are not derived from the Library, + and can be reasonably considered independent and separate works in + themselves, then this License, and its terms, do not apply to those + sections when you distribute them as separate works. But when you + distribute the same sections as part of a whole which is a work based + on the Library, the distribution of the whole must be on the terms of + this License, whose permissions for other licensees extend to the + entire whole, and thus to each and every part regardless of who wrote + it. + + Thus, it is not the intent of this section to claim rights or contest + your rights to work written entirely by you; rather, the intent is to + exercise the right to control the distribution of derivative or + collective works based on the Library. + + In addition, mere aggregation of another work not based on the Library + with the Library (or with a work based on the Library) on a volume of + a storage or distribution medium does not bring the other work under + the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public + License instead of this License to a given copy of the Library. To do + this, you must alter all the notices that refer to this License, so + that they refer to the ordinary GNU General Public License, version 2, + instead of to this License. (If a newer version than version 2 of the + ordinary GNU General Public License has appeared, then you can specify + that version instead if you wish.) Do not make any other change in + these notices. + + Once this change is made in a given copy, it is irreversible for + that copy, so the ordinary GNU General Public License applies to all + subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of + the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or + derivative of it, under Section 2) in object code or executable form + under the terms of Sections 1 and 2 above provided that you accompany + it with the complete corresponding machine-readable source code, which + must be distributed under the terms of Sections 1 and 2 above on a + medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy + from a designated place, then offering equivalent access to copy the + source code from the same place satisfies the requirement to + distribute the source code, even though third parties are not + compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the + Library, but is designed to work with the Library by being compiled or + linked with it, is called a "work that uses the Library". Such a + work, in isolation, is not a derivative work of the Library, and + therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library + creates an executable that is a derivative of the Library (because it + contains portions of the Library), rather than a "work that uses the + library". The executable is therefore covered by this License. + Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file + that is part of the Library, the object code for the work may be a + derivative work of the Library even though the source code is not. + Whether this is true is especially significant if the work can be + linked without the Library, or if the work is itself a library. The + threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data + structure layouts and accessors, and small macros and small inline + functions (ten lines or less in length), then the use of the object + file is unrestricted, regardless of whether it is legally a derivative + work. (Executables containing this object code plus portions of the + Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may + distribute the object code for the work under the terms of Section 6. + Any executables containing that work also fall under Section 6, + whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or + link a "work that uses the Library" with the Library to produce a + work containing portions of the Library, and distribute that work + under terms of your choice, provided that the terms permit + modification of the work for the customer's own use and reverse + engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the + Library is used in it and that the Library and its use are covered by + this License. You must supply a copy of this License. If the work + during execution displays copyright notices, you must include the + copyright notice for the Library among them, as well as a reference + directing the user to the copy of this License. 
Also, you must do one + of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the + Library" must include any data and utility programs needed for + reproducing the executable from it. However, as a special exception, + the materials to be distributed need not include anything that is + normally distributed (in either source or binary form) with the major + components (compiler, kernel, and so on) of the operating system on + which the executable runs, unless that component itself accompanies + the executable. + + It may happen that this requirement contradicts the license + restrictions of other proprietary libraries that do not normally + accompany the operating system. Such a contradiction means you cannot + use both them and the Library together in an executable that you + distribute. + + 7. You may place library facilities that are a work based on the + Library side-by-side in a single library together with other library + facilities not covered by this License, and distribute such a combined + library, provided that the separate distribution of the work based on + the Library and of the other library facilities is otherwise + permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute + the Library except as expressly provided under this License. 
Any + attempt otherwise to copy, modify, sublicense, link with, or + distribute the Library is void, and will automatically terminate your + rights under this License. However, parties who have received copies, + or rights, from you under this License will not have their licenses + terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not + signed it. However, nothing else grants you permission to modify or + distribute the Library or its derivative works. These actions are + prohibited by law if you do not accept this License. Therefore, by + modifying or distributing the Library (or any work based on the + Library), you indicate your acceptance of this License to do so, and + all its terms and conditions for copying, distributing or modifying + the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the + Library), the recipient automatically receives a license from the + original licensor to copy, distribute, link with or modify the Library + subject to these terms and conditions. You may not impose any further + restrictions on the recipients' exercise of the rights granted herein. + You are not responsible for enforcing compliance by third parties with + this License. + + 11. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot + distribute so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you + may not distribute the Library at all. For example, if a patent + license would not permit royalty-free redistribution of the Library by + all those who receive copies directly or indirectly through you, then + the only way you could satisfy both it and this License would be to + refrain entirely from distribution of the Library. + + If any portion of this section is held invalid or unenforceable under any + particular circumstance, the balance of the section is intended to apply, + and the section as a whole is intended to apply in other circumstances. + + It is not the purpose of this section to induce you to infringe any + patents or other property right claims or to contest validity of any + such claims; this section has the sole purpose of protecting the + integrity of the free software distribution system which is + implemented by public license practices. Many people have made + generous contributions to the wide range of software distributed + through that system in reliance on consistent application of that + system; it is up to the author/donor to decide if he or she is willing + to distribute software through any other system and a licensee cannot + impose that choice. + + This section is intended to make thoroughly clear what is believed to + be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in + certain countries either by patents or by copyrighted interfaces, the + original copyright holder who places the Library under this License may add + an explicit geographical distribution limitation excluding those countries, + so that distribution is permitted only in or among countries not thus + excluded. 
In such case, this License incorporates the limitation as if + written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new + versions of the Lesser General Public License from time to time. + Such new versions will be similar in spirit to the present version, + but may differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the Library + specifies a version number of this License which applies to it and + "any later version", you have the option of following the terms and + conditions either of that version or of any later version published by + the Free Software Foundation. If the Library does not specify a + license version number, you may choose any version ever published by + the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free + programs whose distribution conditions are incompatible with these, + write to the author to ask for permission. For software which is + copyrighted by the Free Software Foundation, write to the Free + Software Foundation; we sometimes make exceptions for this. Our + decision will be guided by the two goals of preserving the free status + of all derivatives of our free software and of promoting the sharing + and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO + WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. + EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR + OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY + KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE + LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME + THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN + WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY + AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU + FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR + CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE + LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING + RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A + FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF + SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest + possible use to the public, we recommend making it free software that + everyone can redistribute and change. You can do so by permitting + redistribution under these terms (or, alternatively, under the terms of the + ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is + safest to attach them to the start of each source file to most effectively + convey the exclusion of warranty; and each file should have at least the + "copyright" line and a pointer to where the full notice is found. 
+ + <one line to give the library's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + + Also add information on how to contact you by electronic and paper mail. + + You should also get your employer (if you work as a programmer) or your + school, if any, to sign a "copyright disclaimer" for the library, if + necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + <signature of Ty Coon>, 1 April 1990 + Ty Coon, President of Vice + + That's all there is to it! + + ---------------------------------------------------------------------- + +* libssh2 - http://www.libssh2.org/license.html + + Copyright (c) 2004-2007 Sara Golemon <sarag@libssh2.org> + Copyright (c) 2005,2006 Mikhail Gusarov <dottedmag@dottedmag.net> + Copyright (c) 2006-2007 The Written Word, Inc. + Copyright (c) 2007 Eli Fant <elifantu@mail.ru> + Copyright (c) 2009 Daniel Stenberg + Copyright (C) 2008, 2009 Simon Josefsson + All rights reserved. + + Redistribution and use in source and binary forms, + with or without modification, are permitted provided + that the following conditions are met: + + Redistributions of source code must retain the above + copyright notice, this list of conditions and the + following disclaimer. + + Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + Neither the name of the copyright holder nor the names + of any other contributors may be used to endorse or + promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY + OF SUCH DAMAGE. + +* libcurl - http://curl.haxx.se/docs/copyright.html + + COPYRIGHT AND PERMISSION NOTICE + + Copyright (c) 1996 - 2014, Daniel Stenberg, daniel@haxx.se. + + All rights reserved. + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies.
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE + USE OR OTHER DEALINGS IN THE SOFTWARE. + + Except as contained in this notice, the name of a copyright holder shall not + be used in advertising or otherwise to promote the sale, use or other + dealings in this Software without prior written authorization of the + copyright holder. + +* flate2-rs - https://github.com/alexcrichton/flate2-rs/blob/master/LICENSE-MIT +* link-config - https://github.com/alexcrichton/link-config/blob/master/LICENSE-MIT +* openssl-static-sys - https://github.com/alexcrichton/openssl-static-sys/blob/master/LICENSE-MIT +* toml-rs - https://github.com/alexcrichton/toml-rs/blob/master/LICENSE-MIT +* libssh2-static-sys - https://github.com/alexcrichton/libssh2-static-sys/blob/master/LICENSE-MIT +* git2-rs - https://github.com/alexcrichton/git2-rs/blob/master/LICENSE-MIT +* tar-rs - https://github.com/alexcrichton/tar-rs/blob/master/LICENSE-MIT + + Copyright (c) 2014 Alex Crichton + + Permission is hereby granted, free of charge, to any + person obtaining a copy of this software and associated + documentation files (the "Software"), to deal in the + Software without restriction, including without + limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software + is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice + shall be included in all copies or substantial portions + of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED + TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT + SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +* glob - https://github.com/rust-lang/glob/blob/master/LICENSE-MIT +* semver - https://github.com/rust-lang/semver/blob/master/LICENSE-MIT + + Copyright (c) 2014 The Rust Project Developers + + Permission is hereby granted, free of charge, to any + person obtaining a copy of this software and associated + documentation files (the "Software"), to deal in the + Software without restriction, including without + limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software + is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice + shall be included in all copies or substantial portions + of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED + TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT + SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +* rust-url - https://github.com/servo/rust-url/blob/master/LICENSE-MIT + + Copyright (c) 2006-2009 Graydon Hoare + Copyright (c) 2009-2013 Mozilla Foundation + + Permission is hereby granted, free of charge, to any + person obtaining a copy of this software and associated + documentation files (the "Software"), to deal in the + Software without restriction, including without + limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software + is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice + shall be included in all copies or substantial portions + of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED + TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT + SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +* rust-encoding - https://github.com/lifthrasiir/rust-encoding/blob/master/LICENSE.txt + + The MIT License (MIT) + + Copyright (c) 2013, Kang Seonghoon. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
+ +* curl-rust - https://github.com/carllerche/curl-rust/blob/master/LICENSE + + Copyright (c) 2014 Carl Lerche + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + +* docopt.rs - https://github.com/docopt/docopt.rs/blob/master/UNLICENSE + + This is free and unencumbered software released into the public domain. + + Anyone is free to copy, modify, publish, use, compile, sell, or + distribute this software, either in source code form or as a compiled + binary, for any purpose, commercial or non-commercial, and by any + means. + + In jurisdictions that recognize copyright laws, the author or authors + of this software dedicate any and all copyright interest in the + software to the public domain. We make this dedication for the benefit + of the public at large and to the detriment of our heirs and + successors. We intend this dedication to be an overt act of + relinquishment in perpetuity of all present and future rights to this + software under copyright law. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + For more information, please refer to <http://unlicense.org/> + diff --git a/collector/compile-benchmarks/cargo/README.md b/collector/compile-benchmarks/cargo/README.md new file mode 100644 index 000000000..dde37be1b --- /dev/null +++ b/collector/compile-benchmarks/cargo/README.md @@ -0,0 +1,84 @@ +Cargo downloads your Rust project’s dependencies and compiles your project. + +Learn more at http://doc.crates.io/ + +## Code Status +[![Build Status](https://travis-ci.org/rust-lang/cargo.svg?branch=master)](https://travis-ci.org/rust-lang/cargo) +[![Build Status](https://ci.appveyor.com/api/projects/status/github/rust-lang/cargo?branch=master&svg=true)](https://ci.appveyor.com/project/rust-lang-libs/cargo) + +## Installing Cargo + +Cargo is distributed by default with Rust, so if you've got `rustc` installed +locally you probably also have `cargo` installed locally.
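+As the "Adding new subcommands" section below explains, Cargo can be extended
+without modifying Cargo itself: any executable named `cargo-<name>` found on
+`PATH` can be invoked as `cargo <name>`. The sketch below illustrates the shape
+of such a subcommand; the crate name `cargo-hello` is a hypothetical example,
+and it assumes Cargo's convention of passing the subcommand name itself as the
+first argument.
+
+```rust
+// src/main.rs of a hypothetical `cargo-hello` crate. Building it produces a
+// binary named `cargo-hello`; with that binary on PATH, `cargo hello ...`
+// runs it with "hello" as the first argument after the program name.
+use std::env;
+
+fn main() {
+    // Skip the program name and the repeated subcommand name ("hello").
+    let extra: Vec<String> = env::args().skip(2).collect();
+    println!("hello from a third-party Cargo subcommand; extra args: {:?}", extra);
+}
+```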
+ +## Compiling from Source + +Cargo requires the following tools and packages to build: + +* `python` +* `curl` (on Unix) +* `cmake` +* OpenSSL headers (only for Unix, this is the `libssl-dev` package on ubuntu) +* `cargo` and `rustc` + +First, you'll want to check out this repository + +``` +git clone --recursive https://github.com/rust-lang/cargo +cd cargo +``` + +With `cargo` already installed, you can simply run: + +``` +cargo build --release +``` + +## Adding new subcommands to Cargo + +Cargo is designed to be extensible with new subcommands without having to modify +Cargo itself. See [the Wiki page][third-party-subcommands] for more details and +a list of known community-developed subcommands. + +[third-party-subcommands]: https://github.com/rust-lang/cargo/wiki/Third-party-cargo-subcommands + + +## Releases + +High level release notes are available as part of [Rust's release notes][rel]. +Cargo releases coincide with Rust releases. + +[rel]: https://github.com/rust-lang/rust/blob/master/RELEASES.md + +## Reporting issues + +Found a bug? We'd love to know about it! + +Please report all issues on the github [issue tracker][issues]. + +[issues]: https://github.com/rust-lang/cargo/issues + + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + + +## License + +Cargo is primarily distributed under the terms of both the MIT license +and the Apache License (Version 2.0). + +See LICENSE-APACHE and LICENSE-MIT for details. + +### Third party software + +This product includes software developed by the OpenSSL Project +for use in the OpenSSL Toolkit (http://www.openssl.org/). + +In binary form, this product includes software that is licensed under the +terms of the GNU General Public License, version 2, with a linking exception, +which can be obtained from the [upstream repository][1]. + +[1]: https://github.com/libgit2/libgit2 + diff --git a/collector/compile-benchmarks/cargo/appveyor.yml b/collector/compile-benchmarks/cargo/appveyor.yml new file mode 100644 index 000000000..e64eeb279 --- /dev/null +++ b/collector/compile-benchmarks/cargo/appveyor.yml @@ -0,0 +1,41 @@ +environment: + + # At the time this was added AppVeyor was having troubles with checking + # revocation of SSL certificates of sites like static.rust-lang.org and what + # we think is crates.io. The libcurl HTTP client by default checks for + # revocation on Windows and according to a mailing list [1] this can be + # disabled. + # + # The `CARGO_HTTP_CHECK_REVOKE` env var here tells cargo to disable SSL + # revocation checking on Windows in libcurl. Note, though, that rustup, which + # we're using to download Rust here, also uses libcurl as the default backend. + # Unlike Cargo, however, rustup doesn't have a mechanism to disable revocation + # checking. To get rustup working we set `RUSTUP_USE_HYPER` which forces it to + # use the Hyper instead of libcurl backend. Both Hyper and libcurl use + # schannel on Windows but it appears that Hyper configures it slightly + # differently such that revocation checking isn't turned on by default. 
+ # + # [1]: https://curl.haxx.se/mail/lib-2016-03/0202.html + RUSTUP_USE_HYPER: 1 + CARGO_HTTP_CHECK_REVOKE: false + + matrix: + - TARGET: x86_64-pc-windows-msvc + OTHER_TARGET: i686-pc-windows-msvc + MAKE_TARGETS: test-unit-x86_64-pc-windows-msvc + +install: + - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe + - rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain nightly + - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin + - rustup target add %OTHER_TARGET% + - rustc -V + - cargo -V + - git submodule update --init + +clone_depth: 1 + +build: false + +test_script: + - cargo test diff --git a/collector/compile-benchmarks/cargo/perf-config.json b/collector/compile-benchmarks/cargo/perf-config.json new file mode 100644 index 000000000..b3c296ba0 --- /dev/null +++ b/collector/compile-benchmarks/cargo/perf-config.json @@ -0,0 +1,8 @@ +{ + "cargo_rustc_opts": "--cap-lints=warn", + "cargo_opts": "--lib", + "runs": 1, + "touch_file": "src/cargo/lib.rs", + "category": "stable", + "artifact": "library" +} diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/.appveyor.yml b/collector/compile-benchmarks/cargo/socket2-0.2.3/.appveyor.yml new file mode 100644 index 000000000..4880006c5 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/.appveyor.yml @@ -0,0 +1,16 @@ +environment: + matrix: + - TARGET: x86_64-pc-windows-msvc +install: + - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe + - rustup-init.exe -y --default-host x86_64-pc-windows-msvc + - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin + - if NOT "%TARGET%" == "x86_64-pc-windows-msvc" rustup target add %TARGET% + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test + - cargo test --features reuseport diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/.gitignore b/collector/compile-benchmarks/cargo/socket2-0.2.3/.gitignore new file mode 100644 index 000000000..4308d8220 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/.gitignore @@ -0,0 +1,3 @@ +target/ +**/*.rs.bk +Cargo.lock diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/.travis.yml b/collector/compile-benchmarks/cargo/socket2-0.2.3/.travis.yml new file mode 100644 index 000000000..e6a4bd7c1 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/.travis.yml @@ -0,0 +1,27 @@ +language: rust +rust: + - stable +sudo: false +before_script: + - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH +script: + - cargo test + - cargo test --features "reuseport unix pair" + - cargo doc --no-deps --all-features +after_success: + - travis-cargo --only nightly doc-upload +env: + global: + secure: "qibsiOrfM/GjYgYFXycSqKMwIK9ZR4cvHZsSqTtqrtxGq5Q7jTwMqdDl8KHDgX1a4it4tGay+7joex8k2zL6OQ+FljQGQq54EDiGw82HWix/fBpOMjMszw+GEDMG/9hUSb6HFdzAKLPAsBRvIs2QteJ60GhL/w4Z/EmfHlVKMnVsYUjfBf5BNlkv8yFvRMY6QqL+F85N7dDQ7JAgdiP79jR7LP8IlCEu/8pgSrf9pSqAHSC1Co1CaN8uhhMlcIIOZ5qYAK4Xty26r2EDzPm5Lw2Bd7a4maN0x+Be2DJvrnX30QkJNNU1XhxYkeZEeUCYAlUhBE5nBHpyyrbAxv+rJodPeyRl5EVpyqi8htPVmcnuA2XpNoHCud7CnzxaFytGvAC5kp0EgS7f3ac4hTnZXCfP0CvnT5UyWfWv9yLwQycdYcAsV4TnKxVAw4ykApGey+h0dyIM2VnzRPOo9D2ZS+JpzPHtx/PXD7aN7IungfTj4PmT+i00QNzkzJR9BqYKmEDBUcz6MLctg4D6xChhN8Go4hvk22F0RVyvEg1MAvXc07EKeWXG/VZ+H2frcPEceMGRBBHiOfOEE/2utNYgvIcmQxd1hvbm3cQOIjeXU2rGneN86cSmx7zNlfOyJUoBfsgGvSEzRxUueibUCaujB/El70HGrMlTnXeERiyd/2Y=" + +matrix: + include: + - rust: beta + - rust: nightly + +notifications: + email: + on_success: never +os: + - 
linux + - osx + diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/Cargo.toml b/collector/compile-benchmarks/cargo/socket2-0.2.3/Cargo.toml new file mode 100644 index 000000000..4e63117f5 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/Cargo.toml @@ -0,0 +1,44 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "socket2" +version = "0.2.3" +authors = ["Alex Crichton <alex@alexcrichton.com>"] +description = "Utilities for handling networking sockets with a maximal amount of configuration\npossible intended.\n" +homepage = "https://github.com/alexcrichton/socket2-rs" +documentation = "https://docs.rs/socket2" +readme = "README.md" +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/socket2-rs" +[package.metadata.docs.rs] +all-features = true +[dev-dependencies.tempdir] +version = "0.3" + +[features] +reuseport = [] +pair = [] +unix = [] +[target."cfg(windows)".dependencies.ws2_32-sys] +version = "0.2" + +[target."cfg(windows)".dependencies.winapi] +version = "0.2" + +[target."cfg(windows)".dependencies.kernel32-sys] +version = "0.2" +[target."cfg(unix)".dependencies.libc] +version = "0.2.14" + +[target."cfg(unix)".dependencies.cfg-if] +version = "0.1" diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/Cargo.toml.orig b/collector/compile-benchmarks/cargo/socket2-0.2.3/Cargo.toml.orig new file mode 100644 index 000000000..09b6680a3 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/Cargo.toml.orig @@ -0,0 +1,33 @@ +[package] +name = "socket2" +version = "0.2.3" +authors = ["Alex Crichton <alex@alexcrichton.com>"] +license = "MIT/Apache-2.0" +readme = "README.md" +repository = "https://github.com/alexcrichton/socket2-rs" +homepage = "https://github.com/alexcrichton/socket2-rs" +documentation = "https://docs.rs/socket2" +description = """ +Utilities for handling networking sockets with a maximal amount of configuration +possible intended. +""" + +[package.metadata.docs.rs] +all-features = true + +[target."cfg(windows)".dependencies] +ws2_32-sys = "0.2" +winapi = "0.2" +kernel32-sys = "0.2" + +[target."cfg(unix)".dependencies] +cfg-if = "0.1" +libc = "0.2.14" + +[dev-dependencies] +tempdir = "0.3" + +[features] +reuseport = [] +pair = [] +unix = [] diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/LICENSE-APACHE b/collector/compile-benchmarks/cargo/socket2-0.2.3/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License.
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/LICENSE-MIT b/collector/compile-benchmarks/cargo/socket2-0.2.3/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/README.md b/collector/compile-benchmarks/cargo/socket2-0.2.3/README.md new file mode 100644 index 000000000..02c16d7d1 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/README.md @@ -0,0 +1,14 @@ +# socket2-rs + +[![Build Status](https://travis-ci.org/alexcrichton/socket2-rs.svg?branch=master)](https://travis-ci.org/alexcrichton/socket2-rs) +[![Build status](https://ci.appveyor.com/api/projects/status/hovebj1gr4bgm3d9?svg=true)](https://ci.appveyor.com/project/alexcrichton/socket2-rs) + +[Documentation](https://docs.rs/socket2) + +# License + +`socket2-rs` is primarily distributed under the terms of both the MIT license and +the Apache License (Version 2.0), with portions covered by various BSD-like +licenses. + +See LICENSE-APACHE, and LICENSE-MIT for details. diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/src/lib.rs b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/lib.rs new file mode 100644 index 000000000..40e6084e6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/lib.rs @@ -0,0 +1,130 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Utilities for handling sockets +//! +//! This crate is sort of an evolution of the `net2` crate after seeing the +//! issues on it over time. The intention of this crate is to provide as direct +//! as possible access to the system's functionality for sockets as possible. No +//! extra fluff (e.g. multiple syscalls or builders) provided in this crate. As +//! a result using this crate can be a little wordy, but it should give you +//! maximal flexibility over configuration of sockets. +//! +//! # Examples +//! +//! ```no_run +//! use std::net::SocketAddr; +//! use socket2::{Socket, Domain, Type}; +//! +//! // create a TCP listener bound to two addresses +//! 
let socket = Socket::new(Domain::ipv4(), Type::stream(), None).unwrap();
+//!
+//! socket.bind(&"127.0.0.1:12345".parse::<SocketAddr>().unwrap().into()).unwrap();
+//! socket.bind(&"127.0.0.1:12346".parse::<SocketAddr>().unwrap().into()).unwrap();
+//! socket.listen(128).unwrap();
+//!
+//! let listener = socket.into_tcp_listener();
+//! // ...
+//! ```
+
+#![doc(html_root_url = "https://docs.rs/socket2/0.2")]
+#![deny(missing_docs)]
+
+#[cfg(unix)] extern crate libc;
+#[cfg(unix)] #[macro_use] extern crate cfg_if;
+
+#[cfg(windows)] extern crate kernel32;
+#[cfg(windows)] extern crate winapi;
+#[cfg(windows)] extern crate ws2_32;
+
+#[cfg(test)] extern crate tempdir;
+
+use utils::NetInt;
+
+#[cfg(unix)] use libc::{sockaddr_storage, socklen_t};
+#[cfg(windows)] use winapi::{SOCKADDR_STORAGE as sockaddr_storage, socklen_t};
+
+mod sockaddr;
+mod socket;
+mod utils;
+
+#[cfg(unix)] #[path = "sys/unix/mod.rs"] mod sys;
+#[cfg(windows)] #[path = "sys/windows.rs"] mod sys;
+
+/// Newtype, owned, wrapper around a system socket.
+///
+/// This type simply wraps an instance of a file descriptor (`c_int`) on Unix
+/// and an instance of `SOCKET` on Windows. This is the main type exported by
+/// this crate and is intended to mirror the raw semantics of sockets on
+/// platforms as closely as possible. Almost all methods correspond to
+/// precisely one libc or OS API call which is essentially just a "Rustic
+/// translation" of what's below.
+///
+/// # Examples
+///
+/// ```no_run
+/// use std::net::SocketAddr;
+/// use socket2::{Socket, Domain, Type, SockAddr};
+///
+/// // create a TCP listener bound to two addresses
+/// let socket = Socket::new(Domain::ipv4(), Type::stream(), None).unwrap();
+///
+/// socket.bind(&"127.0.0.1:12345".parse::<SocketAddr>().unwrap().into()).unwrap();
+/// socket.bind(&"127.0.0.1:12346".parse::<SocketAddr>().unwrap().into()).unwrap();
+/// socket.listen(128).unwrap();
+///
+/// let listener = socket.into_tcp_listener();
+/// // ...
+/// ```
+pub struct Socket {
+    inner: sys::Socket,
+}
+
+/// The address of a socket.
+///
+/// `SockAddr`s may be constructed directly to and from the standard library
+/// `SocketAddr`, `SocketAddrV4`, and `SocketAddrV6` types.
+pub struct SockAddr {
+    storage: sockaddr_storage,
+    len: socklen_t,
+}
+
+/// Specification of the communication domain for a socket.
+///
+/// This is a newtype wrapper around an integer which provides a nicer API in
+/// addition to an injection point for documentation. Convenience constructors
+/// such as `Domain::ipv4`, `Domain::ipv6`, etc, are provided to avoid reaching
+/// into libc for various constants.
+///
+/// This type is freely interconvertible with the `i32` type, however, if a raw
+/// value needs to be provided.
+pub struct Domain(i32);
+
+/// Specification of communication semantics on a socket.
+///
+/// This is a newtype wrapper around an integer which provides a nicer API in
+/// addition to an injection point for documentation. Convenience constructors
+/// such as `Type::stream`, `Type::dgram`, etc, are provided to avoid reaching
+/// into libc for various constants.
+///
+/// This type is freely interconvertible with the `i32` type, however, if a raw
+/// value needs to be provided.
+pub struct Type(i32);
+
+/// Protocol specification used for creating sockets via `Socket::new`.
+///
+/// This is a newtype wrapper around an integer which provides a nicer API in
+/// addition to an injection point for documentation.
+///
+/// This type is freely interconvertible with the `i32` type, however, if a raw
+/// value needs to be provided.
+pub struct Protocol(i32);
+
+fn hton<I: NetInt>(i: I) -> I { i.to_be() }
diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sockaddr.rs b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sockaddr.rs
new file mode 100644
index 000000000..a9050a6dc
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sockaddr.rs
@@ -0,0 +1,194 @@
+use std::fmt;
+use std::mem;
+use std::net::{SocketAddrV4, SocketAddrV6, SocketAddr};
+use std::ptr;
+
+#[cfg(unix)]
+use libc::{sockaddr, sockaddr_storage, sockaddr_in, sockaddr_in6, sa_family_t, socklen_t, AF_INET,
+           AF_INET6};
+#[cfg(windows)]
+use winapi::{SOCKADDR as sockaddr, SOCKADDR_STORAGE as sockaddr_storage,
+             SOCKADDR_IN as sockaddr_in, sockaddr_in6,
+             ADDRESS_FAMILY as sa_family_t, socklen_t, AF_INET, AF_INET6};
+
+use SockAddr;
+
+impl fmt::Debug for SockAddr {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        let mut builder = fmt.debug_struct("SockAddr");
+        builder.field("family", &self.family());
+        if let Some(addr) = self.as_inet() {
+            builder.field("inet", &addr);
+        } else if let Some(addr) = self.as_inet6() {
+            builder.field("inet6", &addr);
+        }
+        builder.finish()
+    }
+}
+
+impl SockAddr {
+    /// Constructs a `SockAddr` from its raw components.
+    pub unsafe fn from_raw_parts(addr: *const sockaddr, len: socklen_t) -> SockAddr {
+        let mut storage = mem::uninitialized::<sockaddr_storage>();
+        ptr::copy_nonoverlapping(addr as *const _ as *const u8,
+                                 &mut storage as *mut _ as *mut u8,
+                                 len as usize);
+
+        SockAddr {
+            storage: storage,
+            len: len,
+        }
+    }
+
+    /// Constructs a `SockAddr` with the family `AF_UNIX` and the provided path.
+    ///
+    /// This function is only available on Unix when the `unix` feature is
+    /// enabled.
+    ///
+    /// # Failure
+    ///
+    /// Returns an error if the path is longer than `SUN_LEN`.
+    #[cfg(all(unix, feature = "unix"))]
+    pub fn unix<P>(path: P) -> ::std::io::Result<SockAddr>
+        where P: AsRef<::std::path::Path>
+    {
+        use std::cmp::Ordering;
+        use std::io;
+        use std::os::unix::ffi::OsStrExt;
+        use libc::{sockaddr_un, AF_UNIX, c_char};
+
+        unsafe {
+            let mut addr = mem::zeroed::<sockaddr_un>();
+            addr.sun_family = AF_UNIX as sa_family_t;
+
+            let bytes = path.as_ref().as_os_str().as_bytes();
+
+            match (bytes.get(0), bytes.len().cmp(&addr.sun_path.len())) {
+                // Abstract paths don't need a null terminator
+                (Some(&0), Ordering::Greater) => {
+                    return Err(io::Error::new(io::ErrorKind::InvalidInput,
+                                              "path must be no longer than SUN_LEN"));
+                }
+                (Some(&0), _) => {}
+                (_, Ordering::Greater) | (_, Ordering::Equal) => {
+                    return Err(io::Error::new(io::ErrorKind::InvalidInput,
+                                              "path must be shorter than SUN_LEN"));
+                }
+                _ => {}
+            }
+
+            for (dst, src) in addr.sun_path.iter_mut().zip(bytes) {
+                *dst = *src as c_char;
+            }
+            // null byte for pathname is already there since we zeroed up front
+
+            let base = &addr as *const _ as usize;
+            let path = &addr.sun_path as *const _ as usize;
+            let sun_path_offset = path - base;
+
+            let mut len = sun_path_offset + bytes.len();
+            match bytes.get(0) {
+                Some(&0) | None => {}
+                Some(_) => len += 1,
+            }
+            Ok(SockAddr::from_raw_parts(&addr as *const _ as *const _, len as socklen_t))
+        }
+    }
+
+    unsafe fn as_<T>(&self, family: sa_family_t) -> Option<T> {
+        if self.storage.ss_family != family {
+            return None;
+        }
+
+        Some(mem::transmute_copy(&self.storage))
+    }
+
+    /// Returns this address as a `SocketAddrV4` if it is in the `AF_INET`
+    /// family.
+    pub fn as_inet(&self) -> Option<SocketAddrV4> {
+        unsafe { self.as_(AF_INET as sa_family_t) }
+    }
+
+    /// Returns this address as a `SocketAddrV6` if it is in the `AF_INET6`
+    /// family.
+    pub fn as_inet6(&self) -> Option<SocketAddrV6> {
+        unsafe { self.as_(AF_INET6 as sa_family_t) }
+    }
+
+    /// Returns this address's family.
+    pub fn family(&self) -> sa_family_t {
+        self.storage.ss_family
+    }
+
+    /// Returns the size of this address in bytes.
+    pub fn len(&self) -> socklen_t {
+        self.len
+    }
+
+    /// Returns a raw pointer to the address.
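+    //
+    // A minimal usage sketch for the `unix` constructor above (editorial, not
+    // part of upstream socket2; assumes the `unix` feature is enabled and that
+    // `socket` is an `AF_UNIX` stream socket):
+    //
+    //     let addr = SockAddr::unix("/tmp/sock")?;
+    //     socket.bind(&addr)?;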
+    pub fn as_ptr(&self) -> *const sockaddr {
+        &self.storage as *const _ as *const _
+    }
+}
+
+// SocketAddrV4 and SocketAddrV6 are just wrappers around sockaddr_in and sockaddr_in6
+
+// check to make sure that the sizes at least match up
+fn _size_checks(v4: SocketAddrV4, v6: SocketAddrV6) {
+    /*unsafe {
+        mem::transmute::<SocketAddrV4, sockaddr_in>(v4);
+        mem::transmute::<SocketAddrV6, sockaddr_in6>(v6);
+    }*/
+}
+
+impl From<SocketAddrV4> for SockAddr {
+    fn from(addr: SocketAddrV4) -> SockAddr {
+        unsafe {
+            SockAddr::from_raw_parts(&addr as *const _ as *const _,
+                                     mem::size_of::<SocketAddrV4>() as socklen_t)
+        }
+    }
+}
+
+impl From<SocketAddrV6> for SockAddr {
+    fn from(addr: SocketAddrV6) -> SockAddr {
+        unsafe {
+            SockAddr::from_raw_parts(&addr as *const _ as *const _,
+                                     mem::size_of::<SocketAddrV6>() as socklen_t)
+        }
+    }
+}
+
+impl From<SocketAddr> for SockAddr {
+    fn from(addr: SocketAddr) -> SockAddr {
+        match addr {
+            SocketAddr::V4(addr) => addr.into(),
+            SocketAddr::V6(addr) => addr.into(),
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn inet() {
+        let raw = "127.0.0.1:80".parse::<SocketAddrV4>().unwrap();
+        let addr = SockAddr::from(raw);
+        assert!(addr.as_inet6().is_none());
+        let addr = addr.as_inet().unwrap();
+        assert_eq!(raw, addr);
+    }
+
+    #[test]
+    fn inet6() {
+        let raw = "[2001:db8::ff00:42:8329]:80"
+            .parse::<SocketAddrV6>()
+            .unwrap();
+        let addr = SockAddr::from(raw);
+        assert!(addr.as_inet().is_none());
+        let addr = addr.as_inet6().unwrap();
+        assert_eq!(raw, addr);
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/src/socket.rs b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/socket.rs
new file mode 100644
index 000000000..eed39ba04
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/socket.rs
@@ -0,0 +1,852 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::fmt;
+use std::io::{self, Read, Write};
+use std::net::{self, Ipv4Addr, Ipv6Addr, Shutdown};
+use std::time::Duration;
+#[cfg(all(unix, feature = "unix"))]
+use std::os::unix::net::{UnixDatagram, UnixListener, UnixStream};
+
+#[cfg(unix)]
+use libc as c;
+#[cfg(windows)]
+use winapi as c;
+
+use sys;
+use {Socket, Protocol, Domain, Type, SockAddr};
+
+impl Socket {
+    /// Creates a new socket ready to be configured.
+    ///
+    /// This function corresponds to `socket(2)` and simply creates a new
+    /// socket, no other configuration is done and further functions must be
+    /// invoked to configure this socket.
+    pub fn new(domain: Domain,
+               type_: Type,
+               protocol: Option<Protocol>) -> io::Result<Socket> {
+        let protocol = protocol.map(|p| p.0).unwrap_or(0);
+        Ok(Socket {
+            inner: sys::Socket::new(domain.0, type_.0, protocol)?,
+        })
+    }
+
+    /// Creates a pair of sockets which are connected to each other.
+    ///
+    /// This function corresponds to `socketpair(2)`.
+    ///
+    /// This function is only available on Unix when the `pair` feature is
+    /// enabled.
+    #[cfg(all(unix, feature = "pair"))]
+    pub fn pair(domain: Domain,
+                type_: Type,
+                protocol: Option<Protocol>) -> io::Result<(Socket, Socket)> {
+        let protocol = protocol.map(|p| p.0).unwrap_or(0);
+        let sockets = sys::Socket::pair(domain.0, type_.0, protocol)?;
+        Ok((Socket { inner: sockets.0 }, Socket { inner: sockets.1 }))
+    }
+
+    /// Consumes this `Socket`, converting it to a `TcpStream`.
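+    //
+    // Sketch of the intended flow (editorial, not upstream code): create a
+    // socket, configure it, then hand it to the standard library:
+    //
+    //     let socket = Socket::new(Domain::ipv4(), Type::stream(), None)?;
+    //     socket.bind(&"127.0.0.1:0".parse::<SocketAddr>()?.into())?;
+    //     socket.listen(128)?;
+    //     let listener = socket.into_tcp_listener();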
+ pub fn into_tcp_stream(self) -> net::TcpStream { + self.into() + } + + /// Consumes this `Socket`, converting it to a `TcpListener`. + pub fn into_tcp_listener(self) -> net::TcpListener { + self.into() + } + + /// Consumes this `Socket`, converting it to a `UdpSocket`. + pub fn into_udp_socket(self) -> net::UdpSocket { + self.into() + } + + /// Consumes this `Socket`, converting it into a `UnixStream`. + /// + /// This function is only available on Unix when the `unix` feature is + /// enabled. + #[cfg(all(unix, feature = "unix"))] + pub fn into_unix_stream(self) -> UnixStream { + self.into() + } + + /// Consumes this `Socket`, converting it into a `UnixListener`. + /// + /// This function is only available on Unix when the `unix` feature is + /// enabled. + #[cfg(all(unix, feature = "unix"))] + pub fn into_unix_listener(self) -> UnixListener { + self.into() + } + + /// Consumes this `Socket`, converting it into a `UnixDatagram`. + /// + /// This function is only available on Unix when the `unix` feature is + /// enabled. + #[cfg(all(unix, feature = "unix"))] + pub fn into_unix_datagram(self) -> UnixDatagram { + self.into() + } + + /// Initiate a connection on this socket to the specified address. + /// + /// This function directly corresponds to the connect(2) function on Windows + /// and Unix. + /// + /// An error will be returned if `listen` or `connect` has already been + /// called on this builder. + pub fn connect(&self, addr: &SockAddr) -> io::Result<()> { + self.inner.connect(addr) + } + + /// Initiate a connection on this socket to the specified address, only + /// only waiting for a certain period of time for the connection to be + /// established. + /// + /// Unlike many other methods on `Socket`, this does *not* correspond to a + /// single C function. It sets the socket to nonblocking mode, connects via + /// connect(2), and then waits for the connection to complete with poll(2) + /// on Unix and select on Windows. When the connection is complete, the + /// socket is set back to blocking mode. On Unix, this will loop over + /// `EINTR` errors. + /// + /// # Warnings + /// + /// The nonblocking state of the socket is overridden by this function - + /// it will be returned in blocking mode on success, and in an indeterminate + /// state on failure. + /// + /// If the connection request times out, it may still be processing in the + /// background - a second call to `connect` or `connect_timeout` may fail. + pub fn connect_timeout(&self, addr: &SockAddr, timeout: Duration) -> io::Result<()> { + self.inner.connect_timeout(addr, timeout) + } + + /// Binds this socket to the specified address. + /// + /// This function directly corresponds to the bind(2) function on Windows + /// and Unix. + pub fn bind(&self, addr: &SockAddr) -> io::Result<()> { + self.inner.bind(addr) + } + + /// Mark a socket as ready to accept incoming connection requests using + /// accept() + /// + /// This function directly corresponds to the listen(2) function on Windows + /// and Unix. + /// + /// An error will be returned if `listen` or `connect` has already been + /// called on this builder. + pub fn listen(&self, backlog: i32) -> io::Result<()> { + self.inner.listen(backlog) + } + + /// Accept a new incoming connection from this listener. + /// + /// This function will block the calling thread until a new connection is + /// established. When established, the corresponding `Socket` and the + /// remote peer's address will be returned. 
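+    //
+    // Editorial sketch of `connect_timeout` (not upstream code; `addr` is a
+    // hypothetical `SockAddr`):
+    //
+    //     socket.connect_timeout(&addr, Duration::from_secs(5))?;
+    //
+    // On success the socket is back in blocking mode; on failure its blocking
+    // state is indeterminate, as documented above.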
+    pub fn accept(&self) -> io::Result<(Socket, SockAddr)> {
+        self.inner.accept().map(|(socket, addr)| {
+            (Socket { inner: socket }, addr)
+        })
+    }
+
+    /// Returns the socket address of the local half of this TCP connection.
+    pub fn local_addr(&self) -> io::Result<SockAddr> {
+        self.inner.local_addr()
+    }
+
+    /// Returns the socket address of the remote peer of this TCP connection.
+    pub fn peer_addr(&self) -> io::Result<SockAddr> {
+        self.inner.peer_addr()
+    }
+
+    /// Creates a new independently owned handle to the underlying socket.
+    ///
+    /// The returned `TcpStream` is a reference to the same stream that this
+    /// object references. Both handles will read and write the same stream of
+    /// data, and options set on one stream will be propagated to the other
+    /// stream.
+    pub fn try_clone(&self) -> io::Result<Socket> {
+        self.inner.try_clone().map(|s| Socket { inner: s })
+    }
+
+    /// Get the value of the `SO_ERROR` option on this socket.
+    ///
+    /// This will retrieve the stored error in the underlying socket, clearing
+    /// the field in the process. This can be useful for checking errors between
+    /// calls.
+    pub fn take_error(&self) -> io::Result<Option<io::Error>> {
+        self.inner.take_error()
+    }
+
+    /// Moves this TCP stream into or out of nonblocking mode.
+    ///
+    /// On Unix this corresponds to calling fcntl, and on Windows this
+    /// corresponds to calling ioctlsocket.
+    pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {
+        self.inner.set_nonblocking(nonblocking)
+    }
+
+    /// Shuts down the read, write, or both halves of this connection.
+    ///
+    /// This function will cause all pending and future I/O on the specified
+    /// portions to return immediately with an appropriate value.
+    pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {
+        self.inner.shutdown(how)
+    }
+
+    /// Receives data on the socket from the remote address to which it is
+    /// connected.
+    ///
+    /// The [`connect`] method will connect this socket to a remote address. This
+    /// method will fail if the socket is not connected.
+    ///
+    /// [`connect`]: #method.connect
+    pub fn recv(&self, buf: &mut [u8]) -> io::Result<usize> {
+        self.inner.recv(buf)
+    }
+
+    /// Receives data on the socket from the remote address to which it is
+    /// connected, without removing that data from the queue. On success,
+    /// returns the number of bytes peeked.
+    ///
+    /// Successive calls return the same data. This is accomplished by passing
+    /// `MSG_PEEK` as a flag to the underlying `recv` system call.
+    pub fn peek(&self, buf: &mut [u8]) -> io::Result<usize> {
+        self.inner.peek(buf)
+    }
+
+    /// Receives data from the socket. On success, returns the number of bytes
+    /// read and the address from whence the data came.
+    pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SockAddr)> {
+        self.inner.recv_from(buf)
+    }
+
+    /// Receives data from the socket, without removing it from the queue.
+    ///
+    /// Successive calls return the same data. This is accomplished by passing
+    /// `MSG_PEEK` as a flag to the underlying `recvfrom` system call.
+    ///
+    /// On success, returns the number of bytes peeked and the address from
+    /// whence the data came.
+    pub fn peek_from(&self, buf: &mut [u8]) -> io::Result<(usize, SockAddr)> {
+        self.inner.peek_from(buf)
+    }
+
+    /// Sends data on the socket to a connected peer.
+    ///
+    /// This is typically used on TCP sockets or datagram sockets which have
+    /// been connected.
+    ///
+    /// On success returns the number of bytes that were sent.
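+    //
+    // Editorial sketch (not upstream code): `peek` leaves data queued, so a
+    // following `recv` observes the same bytes:
+    //
+    //     let n = socket.peek(&mut buf)?;
+    //     let m = socket.recv(&mut buf)?; // returns the peeked bytes again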
+    pub fn send(&self, buf: &[u8]) -> io::Result<usize> {
+        self.inner.send(buf)
+    }
+
+    /// Sends data on the socket to the given address. On success, returns the
+    /// number of bytes written.
+    ///
+    /// This is typically used on UDP or datagram-oriented sockets.
+    pub fn send_to(&self, buf: &[u8], addr: &SockAddr) -> io::Result<usize> {
+        self.inner.send_to(buf, addr)
+    }
+
+    // ================================================
+
+    /// Gets the value of the `IP_TTL` option for this socket.
+    ///
+    /// For more information about this option, see [`set_ttl`][link].
+    ///
+    /// [link]: #method.set_ttl
+    pub fn ttl(&self) -> io::Result<u32> {
+        self.inner.ttl()
+    }
+
+    /// Sets the value for the `IP_TTL` option on this socket.
+    ///
+    /// This value sets the time-to-live field that is used in every packet sent
+    /// from this socket.
+    pub fn set_ttl(&self, ttl: u32) -> io::Result<()> {
+        self.inner.set_ttl(ttl)
+    }
+
+    /// Gets the value of the `IPV6_V6ONLY` option for this socket.
+    ///
+    /// For more information about this option, see [`set_only_v6`][link].
+    ///
+    /// [link]: #method.set_only_v6
+    pub fn only_v6(&self) -> io::Result<bool> {
+        self.inner.only_v6()
+    }
+
+    /// Sets the value for the `IPV6_V6ONLY` option on this socket.
+    ///
+    /// If this is set to `true` then the socket is restricted to sending and
+    /// receiving IPv6 packets only. In this case two IPv4 and IPv6 applications
+    /// can bind the same port at the same time.
+    ///
+    /// If this is set to `false` then the socket can be used to send and
+    /// receive packets from an IPv4-mapped IPv6 address.
+    pub fn set_only_v6(&self, only_v6: bool) -> io::Result<()> {
+        self.inner.set_only_v6(only_v6)
+    }
+
+    /// Returns the read timeout of this socket.
+    ///
+    /// If the timeout is `None`, then `read` calls will block indefinitely.
+    pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
+        self.inner.read_timeout()
+    }
+
+    /// Sets the read timeout to the timeout specified.
+    ///
+    /// If the value specified is `None`, then `read` calls will block
+    /// indefinitely. It is an error to pass the zero `Duration` to this
+    /// method.
+    pub fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
+        self.inner.set_read_timeout(dur)
+    }
+
+    /// Returns the write timeout of this socket.
+    ///
+    /// If the timeout is `None`, then `write` calls will block indefinitely.
+    pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
+        self.inner.write_timeout()
+    }
+
+    /// Sets the write timeout to the timeout specified.
+    ///
+    /// If the value specified is `None`, then `write` calls will block
+    /// indefinitely. It is an error to pass the zero `Duration` to this
+    /// method.
+    pub fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
+        self.inner.set_write_timeout(dur)
+    }
+
+    /// Gets the value of the `TCP_NODELAY` option on this socket.
+    ///
+    /// For more information about this option, see [`set_nodelay`][link].
+    ///
+    /// [link]: #method.set_nodelay
+    pub fn nodelay(&self) -> io::Result<bool> {
+        self.inner.nodelay()
+    }
+
+    /// Sets the value of the `TCP_NODELAY` option on this socket.
+    ///
+    /// If set, this option disables the Nagle algorithm. This means that
+    /// segments are always sent as soon as possible, even if there is only a
+    /// small amount of data. When not set, data is buffered until there is a
+    /// sufficient amount to send out, thereby avoiding the frequent sending of
+    /// small packets.
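+    //
+    // Editorial sketch (not upstream code; `addr` is a hypothetical
+    // `SockAddr`): latency-sensitive callers typically disable Nagle right
+    // after the connection is established:
+    //
+    //     socket.connect(&addr)?;
+    //     socket.set_nodelay(true)?;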
+    pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> {
+        self.inner.set_nodelay(nodelay)
+    }
+
+    /// Gets the value of the `SO_BROADCAST` option for this socket.
+    ///
+    /// For more information about this option, see [`set_broadcast`][link].
+    ///
+    /// [link]: #method.set_broadcast
+    pub fn broadcast(&self) -> io::Result<bool> {
+        self.inner.broadcast()
+    }
+
+    /// Sets the value of the `SO_BROADCAST` option for this socket.
+    ///
+    /// When enabled, this socket is allowed to send packets to a broadcast
+    /// address.
+    pub fn set_broadcast(&self, broadcast: bool) -> io::Result<()> {
+        self.inner.set_broadcast(broadcast)
+    }
+
+    /// Gets the value of the `IP_MULTICAST_LOOP` option for this socket.
+    ///
+    /// For more information about this option, see
+    /// [`set_multicast_loop_v4`][link].
+    ///
+    /// [link]: #method.set_multicast_loop_v4
+    pub fn multicast_loop_v4(&self) -> io::Result<bool> {
+        self.inner.multicast_loop_v4()
+    }
+
+    /// Sets the value of the `IP_MULTICAST_LOOP` option for this socket.
+    ///
+    /// If enabled, multicast packets will be looped back to the local socket.
+    /// Note that this may not have any effect on IPv6 sockets.
+    pub fn set_multicast_loop_v4(&self, multicast_loop_v4: bool) -> io::Result<()> {
+        self.inner.set_multicast_loop_v4(multicast_loop_v4)
+    }
+
+    /// Gets the value of the `IP_MULTICAST_TTL` option for this socket.
+    ///
+    /// For more information about this option, see
+    /// [`set_multicast_ttl_v4`][link].
+    ///
+    /// [link]: #method.set_multicast_ttl_v4
+    pub fn multicast_ttl_v4(&self) -> io::Result<u32> {
+        self.inner.multicast_ttl_v4()
+    }
+
+    /// Sets the value of the `IP_MULTICAST_TTL` option for this socket.
+    ///
+    /// Indicates the time-to-live value of outgoing multicast packets for
+    /// this socket. The default value is 1 which means that multicast packets
+    /// don't leave the local network unless explicitly requested.
+    ///
+    /// Note that this may not have any effect on IPv6 sockets.
+    pub fn set_multicast_ttl_v4(&self, multicast_ttl_v4: u32) -> io::Result<()> {
+        self.inner.set_multicast_ttl_v4(multicast_ttl_v4)
+    }
+
+    /// Gets the value of the `IPV6_MULTICAST_LOOP` option for this socket.
+    ///
+    /// For more information about this option, see
+    /// [`set_multicast_loop_v6`][link].
+    ///
+    /// [link]: #method.set_multicast_loop_v6
+    pub fn multicast_loop_v6(&self) -> io::Result<bool> {
+        self.inner.multicast_loop_v6()
+    }
+
+    /// Sets the value of the `IPV6_MULTICAST_LOOP` option for this socket.
+    ///
+    /// Controls whether this socket sees the multicast packets it sends itself.
+    /// Note that this may not have any effect on IPv4 sockets.
+    pub fn set_multicast_loop_v6(&self, multicast_loop_v6: bool) -> io::Result<()> {
+        self.inner.set_multicast_loop_v6(multicast_loop_v6)
+    }
+
+
+    /// Executes an operation of the `IP_ADD_MEMBERSHIP` type.
+    ///
+    /// This function specifies a new multicast group for this socket to join.
+    /// The address must be a valid multicast address, and `interface` is the
+    /// address of the local interface with which the system should join the
+    /// multicast group. If it's equal to `INADDR_ANY` then an appropriate
+    /// interface is chosen by the system.
+    pub fn join_multicast_v4(&self,
+                             multiaddr: &Ipv4Addr,
+                             interface: &Ipv4Addr) -> io::Result<()> {
+        self.inner.join_multicast_v4(multiaddr, interface)
+    }
+
+    /// Executes an operation of the `IPV6_ADD_MEMBERSHIP` type.
+    ///
+    /// This function specifies a new multicast group for this socket to join.
+    /// The address must be a valid multicast address, and `interface` is the
+    /// index of the interface to join/leave (or 0 to indicate any interface).
+    pub fn join_multicast_v6(&self,
+                             multiaddr: &Ipv6Addr,
+                             interface: u32) -> io::Result<()> {
+        self.inner.join_multicast_v6(multiaddr, interface)
+    }
+
+    /// Executes an operation of the `IP_DROP_MEMBERSHIP` type.
+    ///
+    /// For more information about this option, see
+    /// [`join_multicast_v4`][link].
+    ///
+    /// [link]: #method.join_multicast_v4
+    pub fn leave_multicast_v4(&self,
+                              multiaddr: &Ipv4Addr,
+                              interface: &Ipv4Addr) -> io::Result<()> {
+        self.inner.leave_multicast_v4(multiaddr, interface)
+    }
+
+    /// Executes an operation of the `IPV6_DROP_MEMBERSHIP` type.
+    ///
+    /// For more information about this option, see
+    /// [`join_multicast_v6`][link].
+    ///
+    /// [link]: #method.join_multicast_v6
+    pub fn leave_multicast_v6(&self,
+                              multiaddr: &Ipv6Addr,
+                              interface: u32) -> io::Result<()> {
+        self.inner.leave_multicast_v6(multiaddr, interface)
+    }
+
+    /// Reads the linger duration for this socket by getting the `SO_LINGER`
+    /// option.
+    pub fn linger(&self) -> io::Result<Option<Duration>> {
+        self.inner.linger()
+    }
+
+    /// Sets the linger duration of this socket by setting the `SO_LINGER` option.
+    pub fn set_linger(&self, dur: Option<Duration>) -> io::Result<()> {
+        self.inner.set_linger(dur)
+    }
+
+    /// Check the `SO_REUSEADDR` option on this socket.
+    pub fn reuse_address(&self) -> io::Result<bool> {
+        self.inner.reuse_address()
+    }
+
+    /// Set value for the `SO_REUSEADDR` option on this socket.
+    ///
+    /// This indicates that further calls to `bind` may allow reuse of local
+    /// addresses. For IPv4 sockets this means that a socket may bind even when
+    /// there's a socket already listening on this port.
+    pub fn set_reuse_address(&self, reuse: bool) -> io::Result<()> {
+        self.inner.set_reuse_address(reuse)
+    }
+
+    /// Gets the value of the `SO_RCVBUF` option on this socket.
+    ///
+    /// For more information about this option, see
+    /// [`set_recv_buffer_size`][link].
+    ///
+    /// [link]: #method.set_recv_buffer_size
+    pub fn recv_buffer_size(&self) -> io::Result<usize> {
+        self.inner.recv_buffer_size()
+    }
+
+    /// Sets the value of the `SO_RCVBUF` option on this socket.
+    ///
+    /// Changes the size of the operating system's receive buffer associated
+    /// with the socket.
+    pub fn set_recv_buffer_size(&self, size: usize) -> io::Result<()> {
+        self.inner.set_recv_buffer_size(size)
+    }
+
+    /// Gets the value of the `SO_SNDBUF` option on this socket.
+    ///
+    /// For more information about this option, see
+    /// [`set_send_buffer_size`][link].
+    ///
+    /// [link]: #method.set_send_buffer_size
+    pub fn send_buffer_size(&self) -> io::Result<usize> {
+        self.inner.send_buffer_size()
+    }
+
+    /// Sets the value of the `SO_SNDBUF` option on this socket.
+    ///
+    /// Changes the size of the operating system's send buffer associated with
+    /// the socket.
+    pub fn set_send_buffer_size(&self, size: usize) -> io::Result<()> {
+        self.inner.set_send_buffer_size(size)
+    }
+
+    /// Returns whether keepalive messages are enabled on this socket, and if so
+    /// the duration of time between them.
+    ///
+    /// For more information about this option, see [`set_keepalive`][link].
+    ///
+    /// [link]: #method.set_keepalive
+    pub fn keepalive(&self) -> io::Result<Option<Duration>> {
+        self.inner.keepalive()
+    }
+
+    /// Sets whether keepalive messages are enabled to be sent on this socket.
+    ///
+    /// On Unix, this option will set the `SO_KEEPALIVE` as well as the
+    /// `TCP_KEEPALIVE` or `TCP_KEEPIDLE` option (depending on your platform).
+    /// On Windows, this will set the `SIO_KEEPALIVE_VALS` option.
+    ///
+    /// If `None` is specified then keepalive messages are disabled, otherwise
+    /// the duration specified will be the time to remain idle before sending a
+    /// TCP keepalive probe.
+    ///
+    /// Some platforms specify this value in seconds, so sub-second
+    /// specifications may be omitted.
+    pub fn set_keepalive(&self, keepalive: Option<Duration>) -> io::Result<()> {
+        self.inner.set_keepalive(keepalive)
+    }
+
+    /// Check the value of the `SO_REUSEPORT` option on this socket.
+    ///
+    /// This function is only available on Unix when the `reuseport` feature is
+    /// enabled.
+    #[cfg(all(unix, feature = "reuseport"))]
+    pub fn reuse_port(&self) -> io::Result<bool> {
+        self.inner.reuse_port()
+    }
+
+    /// Set value for the `SO_REUSEPORT` option on this socket.
+    ///
+    /// This indicates that further calls to `bind` may allow reuse of local
+    /// addresses. For IPv4 sockets this means that a socket may bind even when
+    /// there's a socket already listening on this port.
+    ///
+    /// This function is only available on Unix when the `reuseport` feature is
+    /// enabled.
+    #[cfg(all(unix, feature = "reuseport"))]
+    pub fn set_reuse_port(&self, reuse: bool) -> io::Result<()> {
+        self.inner.set_reuse_port(reuse)
+    }
+}
+
+impl Read for Socket {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+        self.inner.read(buf)
+    }
+}
+
+impl<'a> Read for &'a Socket {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+        (&self.inner).read(buf)
+    }
+}
+
+impl Write for Socket {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.inner.write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.inner.flush()
+    }
+}
+
+impl<'a> Write for &'a Socket {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        (&self.inner).write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        (&self.inner).flush()
+    }
+}
+
+impl fmt::Debug for Socket {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
+impl From<net::TcpStream> for Socket {
+    fn from(socket: net::TcpStream) -> Socket {
+        Socket { inner: socket.into() }
+    }
+}
+
+impl From<net::TcpListener> for Socket {
+    fn from(socket: net::TcpListener) -> Socket {
+        Socket { inner: socket.into() }
+    }
+}
+
+impl From<net::UdpSocket> for Socket {
+    fn from(socket: net::UdpSocket) -> Socket {
+        Socket { inner: socket.into() }
+    }
+}
+
+#[cfg(all(unix, feature = "unix"))]
+impl From<UnixStream> for Socket {
+    fn from(socket: UnixStream) -> Socket {
+        Socket { inner: socket.into() }
+    }
+}
+
+#[cfg(all(unix, feature = "unix"))]
+impl From<UnixListener> for Socket {
+    fn from(socket: UnixListener) -> Socket {
+        Socket { inner: socket.into() }
+    }
+}
+
+#[cfg(all(unix, feature = "unix"))]
+impl From<UnixDatagram> for Socket {
+    fn from(socket: UnixDatagram) -> Socket {
+        Socket { inner: socket.into() }
+    }
+}
+
+impl From<Socket> for net::TcpStream {
+    fn from(socket: Socket) -> net::TcpStream {
+        socket.inner.into()
+    }
+}
+
+impl From<Socket> for net::TcpListener {
+    fn from(socket: Socket) -> net::TcpListener {
+        socket.inner.into()
+    }
+}
+
+impl From<Socket> for net::UdpSocket {
+    fn from(socket: Socket) -> net::UdpSocket {
+        socket.inner.into()
+    }
+}
+
+#[cfg(all(unix, feature = "unix"))]
+impl From<Socket> for UnixStream {
+    fn from(socket: Socket) -> UnixStream {
+        socket.inner.into()
+    }
+}
+
+#[cfg(all(unix, feature = "unix"))]
+impl From<Socket> for UnixListener {
+    fn from(socket: Socket) -> UnixListener {
+        socket.inner.into()
+    }
+}
+
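+// Editorial sketch (not upstream code): the `From` conversions above allow a
+// std type to be temporarily rewrapped to reach options std does not expose:
+//
+//     let stream = std::net::TcpStream::connect("127.0.0.1:80")?;
+//     let socket = Socket::from(stream);
+//     socket.set_keepalive(Some(Duration::from_secs(60)))?;
+//     let stream: std::net::TcpStream = socket.into();
+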
+#[cfg(all(unix, feature = "unix"))] +impl From for UnixDatagram { + fn from(socket: Socket) -> UnixDatagram { + socket.inner.into() + } +} + +impl Domain { + /// Domain for IPv4 communication, corresponding to `AF_INET`. + pub fn ipv4() -> Domain { + Domain(c::AF_INET) + } + + /// Domain for IPv6 communication, corresponding to `AF_INET6`. + pub fn ipv6() -> Domain { + Domain(c::AF_INET6) + } + + /// Domain for Unix socket communication, corresponding to `AF_UNIX`. + /// + /// This function is only available on Unix when the `unix` feature is + /// activated. + #[cfg(all(unix, feature = "unix"))] + pub fn unix() -> Domain { + Domain(c::AF_UNIX) + } +} + +impl From for Domain { + fn from(a: i32) -> Domain { + Domain(a) + } +} + +impl From for i32 { + fn from(a: Domain) -> i32 { + a.into() + } +} + +impl Type { + /// Type corresponding to `SOCK_STREAM` + /// + /// Used for protocols such as TCP. + pub fn stream() -> Type { + Type(c::SOCK_STREAM) + } + + /// Type corresponding to `SOCK_DGRAM` + /// + /// Used for protocols such as UDP. + pub fn dgram() -> Type { + Type(c::SOCK_DGRAM) + } + + /// Type corresponding to `SOCK_SEQPACKET` + pub fn seqpacket() -> Type { + Type(c::SOCK_SEQPACKET) + } + + /// Type corresponding to `SOCK_RAW` + pub fn raw() -> Type { + Type(c::SOCK_RAW) + } +} + +impl From for Type { + fn from(a: i32) -> Type { + Type(a) + } +} + +impl From for i32 { + fn from(a: Type) -> i32 { + a.into() + } +} + +impl From for Protocol { + fn from(a: i32) -> Protocol { + Protocol(a) + } +} + +impl From for i32 { + fn from(a: Protocol) -> i32 { + a.into() + } +} + +#[cfg(test)] +mod test { + use std::net::SocketAddr; + + use super::*; + + #[test] + fn connect_timeout_unrouteable() { + // this IP is unroutable, so connections should always time out + let addr = "10.255.255.1:80".parse::().unwrap().into(); + + let socket = Socket::new(Domain::ipv4(), Type::stream(), None).unwrap(); + match socket.connect_timeout(&addr, Duration::from_millis(250)) { + Ok(_) => panic!("unexpected success"), + Err(ref e) if e.kind() == io::ErrorKind::TimedOut => {} + Err(e) => panic!("unexpected error {}", e), + } + } + + #[test] + fn connect_timeout_valid() { + let socket = Socket::new(Domain::ipv4(), Type::stream(), None).unwrap(); + socket.bind(&"127.0.0.1:0".parse::().unwrap().into()).unwrap(); + socket.listen(128).unwrap(); + + let addr = socket.local_addr().unwrap(); + + let socket = Socket::new(Domain::ipv4(), Type::stream(), None).unwrap(); + socket.connect_timeout(&addr, Duration::from_millis(250)).unwrap(); + } + + #[test] + #[cfg(all(unix, feature = "pair", feature = "unix"))] + fn pair() { + let (mut a, mut b) = Socket::pair(Domain::unix(), Type::stream(), None).unwrap(); + a.write_all(b"hello world").unwrap(); + let mut buf = [0; 11]; + b.read_exact(&mut buf).unwrap(); + assert_eq!(buf, &b"hello world"[..]); + } + + #[test] + #[cfg(all(unix, feature = "unix"))] + fn unix() { + use tempdir::TempDir; + + let dir = TempDir::new("unix").unwrap(); + let addr = SockAddr::unix(dir.path().join("sock")).unwrap(); + + let listener = Socket::new(Domain::unix(), Type::stream(), None).unwrap(); + listener.bind(&addr).unwrap(); + listener.listen(10).unwrap(); + + let mut a = Socket::new(Domain::unix(), Type::stream(), None).unwrap(); + a.connect(&addr).unwrap(); + + let mut b = listener.accept().unwrap().0; + + a.write_all(b"hello world").unwrap(); + let mut buf = [0; 11]; + b.read_exact(&mut buf).unwrap(); + assert_eq!(buf, &b"hello world"[..]); + } +} diff --git 
a/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/unix/mod.rs b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/unix/mod.rs new file mode 100644 index 000000000..3d1fcb89d --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/unix/mod.rs @@ -0,0 +1,1028 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::cmp; +use std::fmt; +use std::io::{Read, Write, ErrorKind}; +use std::io; +use std::mem; +use std::net::Shutdown; +use std::net::{self, Ipv4Addr, Ipv6Addr}; +use std::ops::Neg; +use std::os::unix::prelude::*; +use std::sync::atomic::{AtomicBool, Ordering, ATOMIC_BOOL_INIT}; +use std::time::{Duration, Instant}; +#[cfg(feature = "unix")] +use std::os::unix::net::{UnixDatagram, UnixListener, UnixStream}; + +use libc::{self, c_void, c_int}; +use libc::{sockaddr, socklen_t, ssize_t}; + +cfg_if! { + if #[cfg(any(target_os = "dragonfly", target_os = "freebsd", + target_os = "ios", target_os = "macos", + target_os = "openbsd", target_os = "netbsd", + target_os = "solaris", target_os = "haiku"))] { + use libc::IPV6_JOIN_GROUP as IPV6_ADD_MEMBERSHIP; + use libc::IPV6_LEAVE_GROUP as IPV6_DROP_MEMBERSHIP; + } else { + use libc::IPV6_ADD_MEMBERSHIP; + use libc::IPV6_DROP_MEMBERSHIP; + } +} + +cfg_if! { + if #[cfg(any(target_os = "linux", target_os = "android", + target_os = "dragonfly", target_os = "freebsd", + target_os = "openbsd", target_os = "netbsd", + target_os = "haiku", target_os = "bitrig"))] { + use libc::MSG_NOSIGNAL; + } else { + const MSG_NOSIGNAL: c_int = 0x0; + } +} + +cfg_if! { + if #[cfg(any(target_os = "macos", target_os = "ios"))] { + use libc::TCP_KEEPALIVE as KEEPALIVE_OPTION; + } else if #[cfg(any(target_os = "openbsd", target_os = "netbsd", target_os = "haiku"))] { + use libc::SO_KEEPALIVE as KEEPALIVE_OPTION; + } else { + use libc::TCP_KEEPIDLE as KEEPALIVE_OPTION; + } +} + +use SockAddr; +use utils::One; + +#[macro_use] +#[cfg(target_os = "linux")] +mod weak; + +pub struct Socket { + fd: c_int, +} + +impl Socket { + pub fn new(family: c_int, ty: c_int, protocol: c_int) -> io::Result { + unsafe { + // On linux we first attempt to pass the SOCK_CLOEXEC flag to + // atomically create the socket and set it as CLOEXEC. Support for + // this option, however, was added in 2.6.27, and we still support + // 2.6.18 as a kernel, so if the returned error is EINVAL we + // fallthrough to the fallback. 
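+            // (Editorial note: this try-then-fall-back pattern matches what
+            // the standard library does for the same kernel-version reason.)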
+ #[cfg(target_os = "linux")] { + match cvt(libc::socket(family, ty | libc::SOCK_CLOEXEC, protocol)) { + Ok(fd) => return Ok(Socket::from_raw_fd(fd)), + Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {} + Err(e) => return Err(e), + } + } + + let fd = cvt(libc::socket(family, ty, protocol))?; + let fd = Socket::from_raw_fd(fd); + set_cloexec(fd.as_raw_fd())?; + #[cfg(target_os = "macos")] { + fd.setsockopt(libc::SOL_SOCKET, libc::SO_NOSIGPIPE, 1i32)?; + } + Ok(fd) + } + } + + pub fn pair(family: c_int, ty: c_int, protocol: c_int) -> io::Result<(Socket, Socket)> { + unsafe { + let mut fds = [0, 0]; + cvt(libc::socketpair(family, ty, protocol, fds.as_mut_ptr()))?; + let fds = (Socket::from_raw_fd(fds[0]), Socket::from_raw_fd(fds[1])); + set_cloexec(fds.0.as_raw_fd())?; + set_cloexec(fds.1.as_raw_fd())?; + #[cfg(target_os = "macos")] { + fds.0.setsockopt(libc::SOL_SOCKET, libc::SO_NOSIGPIPE, 1i32)?; + fds.1.setsockopt(libc::SOL_SOCKET, libc::SO_NOSIGPIPE, 1i32)?; + } + Ok(fds) + } + } + + pub fn bind(&self, addr: &SockAddr) -> io::Result<()> { + unsafe { + cvt(libc::bind(self.fd, addr.as_ptr(), addr.len() as _)).map(|_| ()) + } + } + + pub fn listen(&self, backlog: i32) -> io::Result<()> { + unsafe { + cvt(libc::listen(self.fd, backlog)).map(|_| ()) + } + } + + pub fn connect(&self, addr: &SockAddr) -> io::Result<()> { + unsafe { + cvt(libc::connect(self.fd, addr.as_ptr(), addr.len())).map(|_| ()) + } + } + + pub fn connect_timeout(&self, addr: &SockAddr, timeout: Duration) -> io::Result<()> { + self.set_nonblocking(true)?; + let r = self.connect(addr); + self.set_nonblocking(false)?; + + match r { + Ok(()) => return Ok(()), + // there's no io::ErrorKind conversion registered for EINPROGRESS :( + Err(ref e) if e.raw_os_error() == Some(libc::EINPROGRESS) => {} + Err(e) => return Err(e), + } + + let mut pollfd = libc::pollfd { + fd: self.fd, + events: libc::POLLOUT, + revents: 0, + }; + + if timeout.as_secs() == 0 && timeout.subsec_nanos() == 0 { + return Err(io::Error::new(io::ErrorKind::InvalidInput, + "cannot set a 0 duration timeout")); + } + + let start = Instant::now(); + + loop { + let elapsed = start.elapsed(); + if elapsed >= timeout { + return Err(io::Error::new(io::ErrorKind::TimedOut, "connection timed out")); + } + + let timeout = timeout - elapsed; + let mut timeout = timeout.as_secs() + .saturating_mul(1_000) + .saturating_add(timeout.subsec_nanos() as u64 / 1_000_000); + if timeout == 0 { + timeout = 1; + } + + let timeout = cmp::min(timeout, c_int::max_value() as u64) as c_int; + + match unsafe { libc::poll(&mut pollfd, 1, timeout) } { + -1 => { + let err = io::Error::last_os_error(); + if err.kind() != io::ErrorKind::Interrupted { + return Err(err); + } + } + 0 => return Err(io::Error::new(io::ErrorKind::TimedOut, "connection timed out")), + _ => { + if pollfd.revents & libc::POLLOUT == 0 { + if let Some(e) = self.take_error()? 
{ + return Err(e); + } + } + return Ok(()); + } + } + } + } + + pub fn local_addr(&self) -> io::Result { + unsafe { + let mut storage: libc::sockaddr_storage = mem::zeroed(); + let mut len = mem::size_of_val(&storage) as libc::socklen_t; + cvt(libc::getsockname(self.fd, + &mut storage as *mut _ as *mut _, + &mut len))?; + Ok(SockAddr::from_raw_parts(&storage as *const _ as *const _, len)) + } + } + + pub fn peer_addr(&self) -> io::Result { + unsafe { + let mut storage: libc::sockaddr_storage = mem::zeroed(); + let mut len = mem::size_of_val(&storage) as libc::socklen_t; + cvt(libc::getpeername(self.fd, + &mut storage as *mut _ as *mut _, + &mut len))?; + Ok(SockAddr::from_raw_parts(&storage as *const _ as *const _, len)) + } + } + + pub fn try_clone(&self) -> io::Result { + // implementation lifted from libstd + #[cfg(any(target_os = "android", target_os = "haiku"))] + use libc::F_DUPFD as F_DUPFD_CLOEXEC; + #[cfg(not(any(target_os = "android", target_os = "haiku")))] + use libc::F_DUPFD_CLOEXEC; + + static CLOEXEC_FAILED: AtomicBool = ATOMIC_BOOL_INIT; + unsafe { + if !CLOEXEC_FAILED.load(Ordering::Relaxed) { + match cvt(libc::fcntl(self.fd, F_DUPFD_CLOEXEC, 0)) { + Ok(fd) => { + let fd = Socket::from_raw_fd(fd); + if cfg!(target_os = "linux") { + set_cloexec(fd.as_raw_fd())?; + } + return Ok(fd) + } + Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => { + CLOEXEC_FAILED.store(true, Ordering::Relaxed); + } + Err(e) => return Err(e), + } + } + let fd = cvt(libc::fcntl(self.fd, libc::F_DUPFD, 0))?; + let fd = Socket::from_raw_fd(fd); + set_cloexec(fd.as_raw_fd())?; + Ok(fd) + } + } + + #[allow(unused_mut)] + pub fn accept(&self) -> io::Result<(Socket, SockAddr)> { + let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() }; + let mut len = mem::size_of_val(&storage) as socklen_t; + + let mut socket = None; + #[cfg(target_os = "linux")] { + weak! 
{ + fn accept4(c_int, *mut sockaddr, *mut socklen_t, c_int) -> c_int + } + if let Some(f) = accept4.get() { + let res = cvt_r(|| unsafe { + f(self.fd, + &mut storage as *mut _ as *mut _, + &mut len, + libc::SOCK_CLOEXEC) + }); + match res { + Ok(fd) => socket = Some(Socket { fd: fd }), + Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => {} + Err(e) => return Err(e), + } + } + } + + let socket = match socket { + Some(socket) => socket, + None => unsafe { + let fd = cvt_r(|| { + libc::accept(self.fd, + &mut storage as *mut _ as *mut _, + &mut len) + })?; + let fd = Socket::from_raw_fd(fd); + set_cloexec(fd.as_raw_fd())?; + fd + } + }; + let addr = unsafe { SockAddr::from_raw_parts(&storage as *const _ as *const _, len) }; + Ok((socket, addr)) + } + + pub fn take_error(&self) -> io::Result> { + unsafe { + let raw: c_int = self.getsockopt(libc::SOL_SOCKET, libc::SO_ERROR)?; + if raw == 0 { + Ok(None) + } else { + Ok(Some(io::Error::from_raw_os_error(raw as i32))) + } + } + } + + pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { + unsafe { + let previous = cvt(libc::fcntl(self.fd, libc::F_GETFL))?; + let new = if nonblocking { + previous | libc::O_NONBLOCK + } else { + previous & !libc::O_NONBLOCK + }; + if new != previous { + cvt(libc::fcntl(self.fd, libc::F_SETFL, new))?; + } + Ok(()) + } + } + + pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { + let how = match how { + Shutdown::Write => libc::SHUT_WR, + Shutdown::Read => libc::SHUT_RD, + Shutdown::Both => libc::SHUT_RDWR, + }; + cvt(unsafe { libc::shutdown(self.fd, how) })?; + Ok(()) + } + + pub fn recv(&self, buf: &mut [u8]) -> io::Result { + unsafe { + let n = cvt({ + libc::recv(self.fd, + buf.as_mut_ptr() as *mut c_void, + cmp::min(buf.len(), max_len()), + 0) + })?; + Ok(n as usize) + } + } + + pub fn peek(&self, buf: &mut [u8]) -> io::Result { + unsafe { + let n = cvt({ + libc::recv(self.fd, + buf.as_mut_ptr() as *mut c_void, + cmp::min(buf.len(), max_len()), + libc::MSG_PEEK) + })?; + Ok(n as usize) + } + } + + pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SockAddr)> { + self.recvfrom(buf, 0) + } + + pub fn peek_from(&self, buf: &mut [u8]) -> io::Result<(usize, SockAddr)> { + self.recvfrom(buf, libc::MSG_PEEK) + } + + fn recvfrom(&self, buf: &mut [u8], flags: c_int) + -> io::Result<(usize, SockAddr)> { + unsafe { + let mut storage: libc::sockaddr_storage = mem::zeroed(); + let mut addrlen = mem::size_of_val(&storage) as socklen_t; + + let n = cvt({ + libc::recvfrom(self.fd, + buf.as_mut_ptr() as *mut c_void, + cmp::min(buf.len(), max_len()), + flags, + &mut storage as *mut _ as *mut _, + &mut addrlen) + })?; + let addr = SockAddr::from_raw_parts(&storage as *const _ as *const _, addrlen); + Ok((n as usize, addr)) + } + } + + pub fn send(&self, buf: &[u8]) -> io::Result { + unsafe { + let n = cvt({ + libc::send(self.fd, + buf.as_ptr() as *const c_void, + cmp::min(buf.len(), max_len()), + MSG_NOSIGNAL) + })?; + Ok(n as usize) + } + } + + pub fn send_to(&self, buf: &[u8], addr: &SockAddr) -> io::Result { + unsafe { + let n = cvt({ + libc::sendto(self.fd, + buf.as_ptr() as *const c_void, + cmp::min(buf.len(), max_len()), + MSG_NOSIGNAL, + addr.as_ptr(), + addr.len()) + })?; + Ok(n as usize) + } + } + + // ================================================ + + pub fn ttl(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::IPPROTO_IP, libc::IP_TTL)?; + Ok(raw as u32) + } + } + + pub fn set_ttl(&self, ttl: u32) -> io::Result<()> { + unsafe { + 
self.setsockopt(libc::IPPROTO_IP, libc::IP_TTL, ttl as c_int) + } + } + + pub fn only_v6(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::IPPROTO_IPV6, + libc::IPV6_V6ONLY)?; + Ok(raw != 0) + } + } + + pub fn set_only_v6(&self, only_v6: bool) -> io::Result<()> { + unsafe { + self.setsockopt(libc::IPPROTO_IPV6, + libc::IPV6_V6ONLY, + only_v6 as c_int) + } + } + + pub fn read_timeout(&self) -> io::Result> { + unsafe { + Ok(timeval2dur(self.getsockopt(libc::SOL_SOCKET, libc::SO_RCVTIMEO)?)) + } + } + + pub fn set_read_timeout(&self, dur: Option) -> io::Result<()> { + unsafe { + self.setsockopt(libc::SOL_SOCKET, + libc::SO_RCVTIMEO, + dur2timeval(dur)?) + } + } + + pub fn write_timeout(&self) -> io::Result> { + unsafe { + Ok(timeval2dur(self.getsockopt(libc::SOL_SOCKET, libc::SO_SNDTIMEO)?)) + } + } + + pub fn set_write_timeout(&self, dur: Option) -> io::Result<()> { + unsafe { + self.setsockopt(libc::SOL_SOCKET, + libc::SO_SNDTIMEO, + dur2timeval(dur)?) + } + } + + pub fn nodelay(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::IPPROTO_TCP, + libc::TCP_NODELAY)?; + Ok(raw != 0) + } + } + + pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> { + unsafe { + self.setsockopt(libc::IPPROTO_TCP, + libc::TCP_NODELAY, + nodelay as c_int) + } + } + + pub fn broadcast(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::SOL_SOCKET, + libc::SO_BROADCAST)?; + Ok(raw != 0) + } + } + + pub fn set_broadcast(&self, broadcast: bool) -> io::Result<()> { + unsafe { + self.setsockopt(libc::SOL_SOCKET, + libc::SO_BROADCAST, + broadcast as c_int) + } + } + + pub fn multicast_loop_v4(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::IPPROTO_IP, + libc::IP_MULTICAST_LOOP)?; + Ok(raw != 0) + } + } + + pub fn set_multicast_loop_v4(&self, multicast_loop_v4: bool) -> io::Result<()> { + unsafe { + self.setsockopt(libc::IPPROTO_IP, + libc::IP_MULTICAST_LOOP, + multicast_loop_v4 as c_int) + } + } + + pub fn multicast_ttl_v4(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::IPPROTO_IP, + libc::IP_MULTICAST_TTL)?; + Ok(raw as u32) + } + } + + pub fn set_multicast_ttl_v4(&self, multicast_ttl_v4: u32) -> io::Result<()> { + unsafe { + self.setsockopt(libc::IPPROTO_IP, + libc::IP_MULTICAST_TTL, + multicast_ttl_v4 as c_int) + } + } + + pub fn multicast_loop_v6(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::IPPROTO_IPV6, + libc::IPV6_MULTICAST_LOOP)?; + Ok(raw != 0) + } + } + + pub fn set_multicast_loop_v6(&self, multicast_loop_v6: bool) -> io::Result<()> { + unsafe { + self.setsockopt(libc::IPPROTO_IPV6, + libc::IPV6_MULTICAST_LOOP, + multicast_loop_v6 as c_int) + } + } + + pub fn join_multicast_v4(&self, + multiaddr: &Ipv4Addr, + interface: &Ipv4Addr) -> io::Result<()> { + let multiaddr = to_s_addr(multiaddr); + let interface = to_s_addr(interface); + let mreq = libc::ip_mreq { + imr_multiaddr: libc::in_addr { s_addr: multiaddr }, + imr_interface: libc::in_addr { s_addr: interface }, + }; + unsafe { + self.setsockopt(libc::IPPROTO_IP, libc::IP_ADD_MEMBERSHIP, mreq) + } + } + + pub fn join_multicast_v6(&self, + multiaddr: &Ipv6Addr, + interface: u32) -> io::Result<()> { + let multiaddr = to_in6_addr(multiaddr); + let mreq = libc::ipv6_mreq { + ipv6mr_multiaddr: multiaddr, + ipv6mr_interface: to_ipv6mr_interface(interface), + }; + unsafe { + self.setsockopt(libc::IPPROTO_IP, IPV6_ADD_MEMBERSHIP, mreq) + } + } + + pub fn leave_multicast_v4(&self, + multiaddr: &Ipv4Addr, + 
interface: &Ipv4Addr) -> io::Result<()> { + let multiaddr = to_s_addr(multiaddr); + let interface = to_s_addr(interface); + let mreq = libc::ip_mreq { + imr_multiaddr: libc::in_addr { s_addr: multiaddr }, + imr_interface: libc::in_addr { s_addr: interface }, + }; + unsafe { + self.setsockopt(libc::IPPROTO_IP, libc::IP_DROP_MEMBERSHIP, mreq) + } + } + + pub fn leave_multicast_v6(&self, + multiaddr: &Ipv6Addr, + interface: u32) -> io::Result<()> { + let multiaddr = to_in6_addr(multiaddr); + let mreq = libc::ipv6_mreq { + ipv6mr_multiaddr: multiaddr, + ipv6mr_interface: to_ipv6mr_interface(interface), + }; + unsafe { + self.setsockopt(libc::IPPROTO_IP, IPV6_DROP_MEMBERSHIP, mreq) + } + } + + pub fn linger(&self) -> io::Result> { + unsafe { + Ok(linger2dur(self.getsockopt(libc::SOL_SOCKET, libc::SO_LINGER)?)) + } + } + + pub fn set_linger(&self, dur: Option) -> io::Result<()> { + unsafe { + self.setsockopt(libc::SOL_SOCKET, libc::SO_LINGER, dur2linger(dur)) + } + } + + pub fn set_reuse_address(&self, reuse: bool) -> io::Result<()> { + unsafe { + self.setsockopt(libc::SOL_SOCKET, libc::SO_REUSEADDR, reuse as c_int) + } + } + + pub fn reuse_address(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::SOL_SOCKET, + libc::SO_REUSEADDR)?; + Ok(raw != 0) + } + } + + pub fn recv_buffer_size(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::SOL_SOCKET, libc::SO_RCVBUF)?; + Ok(raw as usize) + } + } + + pub fn set_recv_buffer_size(&self, size: usize) -> io::Result<()> { + unsafe { + // TODO: casting usize to a c_int should be a checked cast + self.setsockopt(libc::SOL_SOCKET, libc::SO_RCVBUF, size as c_int) + } + } + + pub fn send_buffer_size(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::SOL_SOCKET, libc::SO_SNDBUF)?; + Ok(raw as usize) + } + } + + pub fn set_send_buffer_size(&self, size: usize) -> io::Result<()> { + unsafe { + // TODO: casting usize to a c_int should be a checked cast + self.setsockopt(libc::SOL_SOCKET, libc::SO_SNDBUF, size as c_int) + } + } + + pub fn keepalive(&self) -> io::Result> { + unsafe { + let raw: c_int = self.getsockopt(libc::SOL_SOCKET, + libc::SO_KEEPALIVE)?; + if raw == 0 { + return Ok(None) + } + let secs: c_int = self.getsockopt(libc::IPPROTO_TCP, + KEEPALIVE_OPTION)?; + Ok(Some(Duration::new(secs as u64, 0))) + } + } + + pub fn set_keepalive(&self, keepalive: Option) -> io::Result<()> { + unsafe { + self.setsockopt(libc::SOL_SOCKET, + libc::SO_KEEPALIVE, + keepalive.is_some() as c_int)?; + if let Some(dur) = keepalive { + // TODO: checked cast here + self.setsockopt(libc::IPPROTO_TCP, + KEEPALIVE_OPTION, + (dur.as_secs() / 1000) as c_int)?; + } + Ok(()) + } + } + + #[cfg(all(unix, feature = "reuseport"))] + pub fn reuse_port(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(libc::SOL_SOCKET, + libc::SO_REUSEPORT)?; + Ok(raw != 0) + } + } + + #[cfg(all(unix, feature = "reuseport"))] + pub fn set_reuse_port(&self, reuse: bool) -> io::Result<()> { + unsafe { + self.setsockopt(libc::SOL_SOCKET, libc::SO_REUSEPORT, reuse as c_int) + } + } + + unsafe fn setsockopt(&self, + opt: c_int, + val: c_int, + payload: T) -> io::Result<()> + where T: Copy, + { + let payload = &payload as *const T as *const c_void; + cvt(libc::setsockopt(self.fd, + opt, + val, + payload, + mem::size_of::() as libc::socklen_t))?; + Ok(()) + } + + unsafe fn getsockopt(&self, opt: c_int, val: c_int) -> io::Result { + let mut slot: T = mem::zeroed(); + let mut len = mem::size_of::() as libc::socklen_t; + 
cvt(libc::getsockopt(self.fd, + opt, + val, + &mut slot as *mut _ as *mut _, + &mut len))?; + assert_eq!(len as usize, mem::size_of::()); + Ok(slot) + } +} + +impl Read for Socket { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + <&Socket>::read(&mut &*self, buf) + } +} + +impl<'a> Read for &'a Socket { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + unsafe { + let n = cvt({ + libc::read(self.fd, + buf.as_mut_ptr() as *mut c_void, + cmp::min(buf.len(), max_len())) + })?; + Ok(n as usize) + } + } +} + +impl Write for Socket { + fn write(&mut self, buf: &[u8]) -> io::Result { + <&Socket>::write(&mut &*self, buf) + } + + fn flush(&mut self) -> io::Result<()> { + <&Socket>::flush(&mut &*self) + } +} + +impl<'a> Write for &'a Socket { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.send(buf) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +impl fmt::Debug for Socket { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut f = f.debug_struct("Socket"); + f.field("fd", &self.fd); + if let Ok(addr) = self.local_addr() { + f.field("local_addr", &addr); + } + if let Ok(addr) = self.peer_addr() { + f.field("peer_addr", &addr); + } + f.finish() + } +} + +impl AsRawFd for Socket { + fn as_raw_fd(&self) -> c_int { + self.fd + } +} + +impl IntoRawFd for Socket { + fn into_raw_fd(self) -> c_int { + let fd = self.fd; + mem::forget(self); + return fd + } +} + +impl FromRawFd for Socket { + unsafe fn from_raw_fd(fd: c_int) -> Socket { + Socket { fd: fd } + } +} + +impl AsRawFd for ::Socket { + fn as_raw_fd(&self) -> c_int { + self.inner.as_raw_fd() + } +} + +impl IntoRawFd for ::Socket { + fn into_raw_fd(self) -> c_int { + self.inner.into_raw_fd() + } +} + +impl FromRawFd for ::Socket { + unsafe fn from_raw_fd(fd: c_int) -> ::Socket { + ::Socket { inner: Socket::from_raw_fd(fd) } + } +} + +impl Drop for Socket { + fn drop(&mut self) { + unsafe { + let _ = libc::close(self.fd); + } + } +} + +impl From for net::TcpStream { + fn from(socket: Socket) -> net::TcpStream { + unsafe { net::TcpStream::from_raw_fd(socket.into_raw_fd()) } + } +} + +impl From for net::TcpListener { + fn from(socket: Socket) -> net::TcpListener { + unsafe { net::TcpListener::from_raw_fd(socket.into_raw_fd()) } + } +} + +impl From for net::UdpSocket { + fn from(socket: Socket) -> net::UdpSocket { + unsafe { net::UdpSocket::from_raw_fd(socket.into_raw_fd()) } + } +} + +#[cfg(all(unix, feature = "unix"))] +impl From for UnixStream { + fn from(socket: Socket) -> UnixStream { + unsafe { UnixStream::from_raw_fd(socket.into_raw_fd()) } + } +} + +#[cfg(all(unix, feature = "unix"))] +impl From for UnixListener { + fn from(socket: Socket) -> UnixListener { + unsafe { UnixListener::from_raw_fd(socket.into_raw_fd()) } + } +} + +#[cfg(all(unix, feature = "unix"))] +impl From for UnixDatagram { + fn from(socket: Socket) -> UnixDatagram { + unsafe { UnixDatagram::from_raw_fd(socket.into_raw_fd()) } + } +} + +impl From for Socket { + fn from(socket: net::TcpStream) -> Socket { + unsafe { Socket::from_raw_fd(socket.into_raw_fd()) } + } +} + +impl From for Socket { + fn from(socket: net::TcpListener) -> Socket { + unsafe { Socket::from_raw_fd(socket.into_raw_fd()) } + } +} + +impl From for Socket { + fn from(socket: net::UdpSocket) -> Socket { + unsafe { Socket::from_raw_fd(socket.into_raw_fd()) } + } +} + +#[cfg(all(unix, feature = "unix"))] +impl From for Socket { + fn from(socket: UnixStream) -> Socket { + unsafe { Socket::from_raw_fd(socket.into_raw_fd()) } + } +} + +#[cfg(all(unix, feature = 
"unix"))] +impl From for Socket { + fn from(socket: UnixListener) -> Socket { + unsafe { Socket::from_raw_fd(socket.into_raw_fd()) } + } +} + +#[cfg(all(unix, feature = "unix"))] +impl From for Socket { + fn from(socket: UnixDatagram) -> Socket { + unsafe { Socket::from_raw_fd(socket.into_raw_fd()) } + } +} + +fn max_len() -> usize { + // The maximum read limit on most posix-like systems is `SSIZE_MAX`, + // with the man page quoting that if the count of bytes to read is + // greater than `SSIZE_MAX` the result is "unspecified". + // + // On macOS, however, apparently the 64-bit libc is either buggy or + // intentionally showing odd behavior by rejecting any read with a size + // larger than or equal to INT_MAX. To handle both of these the read + // size is capped on both platforms. + if cfg!(target_os = "macos") { + ::max_value() as usize - 1 + } else { + ::max_value() as usize + } +} + +fn cvt>(t: T) -> io::Result { + let one: T = T::one(); + if t == -one { + Err(io::Error::last_os_error()) + } else { + Ok(t) + } +} + +fn cvt_r(mut f: F) -> io::Result + where F: FnMut() -> T, + T: One + PartialEq + Neg +{ + loop { + match cvt(f()) { + Err(ref e) if e.kind() == ErrorKind::Interrupted => {} + other => return other, + } + } +} + +fn set_cloexec(fd: c_int) -> io::Result<()> { + unsafe { + let previous = cvt(libc::fcntl(fd, libc::F_GETFD))?; + let new = previous | libc::FD_CLOEXEC; + if new != previous { + cvt(libc::fcntl(fd, libc::F_SETFD, new))?; + } + Ok(()) + } +} + +fn dur2timeval(dur: Option) -> io::Result { + match dur { + Some(dur) => { + if dur.as_secs() == 0 && dur.subsec_nanos() == 0 { + return Err(io::Error::new(io::ErrorKind::InvalidInput, + "cannot set a 0 duration timeout")); + } + + let secs = if dur.as_secs() > libc::time_t::max_value() as u64 { + libc::time_t::max_value() + } else { + dur.as_secs() as libc::time_t + }; + let mut timeout = libc::timeval { + tv_sec: secs, + tv_usec: (dur.subsec_nanos() / 1000) as libc::suseconds_t, + }; + if timeout.tv_sec == 0 && timeout.tv_usec == 0 { + timeout.tv_usec = 1; + } + Ok(timeout) + } + None => { + Ok(libc::timeval { + tv_sec: 0, + tv_usec: 0, + }) + } + } +} + +fn timeval2dur(raw: libc::timeval) -> Option { + if raw.tv_sec == 0 && raw.tv_usec == 0 { + None + } else { + let sec = raw.tv_sec as u64; + let nsec = (raw.tv_usec as u32) * 1000; + Some(Duration::new(sec, nsec)) + } +} + +fn to_s_addr(addr: &Ipv4Addr) -> libc::in_addr_t { + let octets = addr.octets(); + ::hton(((octets[0] as libc::in_addr_t) << 24) | + ((octets[1] as libc::in_addr_t) << 16) | + ((octets[2] as libc::in_addr_t) << 8) | + ((octets[3] as libc::in_addr_t) << 0)) +} + +fn to_in6_addr(addr: &Ipv6Addr) -> libc::in6_addr { + let mut ret: libc::in6_addr = unsafe { mem::zeroed() }; + ret.s6_addr = addr.octets(); + return ret +} + +#[cfg(target_os = "android")] +fn to_ipv6mr_interface(value: u32) -> c_int { + value as c_int +} + +#[cfg(not(target_os = "android"))] +fn to_ipv6mr_interface(value: u32) -> libc::c_uint { + value as libc::c_uint +} + +fn linger2dur(linger_opt: libc::linger) -> Option { + if linger_opt.l_onoff == 0 { + None + } else { + Some(Duration::from_secs(linger_opt.l_linger as u64)) + } +} + +fn dur2linger(dur: Option) -> libc::linger { + match dur { + Some(d) => { + libc::linger { + l_onoff: 1, + l_linger: d.as_secs() as c_int, + } + } + None => libc::linger { l_onoff: 0, l_linger: 0 }, + } +} diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/unix/weak.rs b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/unix/weak.rs 
new file mode 100644
index 000000000..83f979d22
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/unix/weak.rs
@@ -0,0 +1,60 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::marker;
+use std::mem;
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+use libc;
+
+macro_rules! weak {
+    (fn $name:ident($($t:ty),*) -> $ret:ty) => (
+        #[allow(bad_style)]
+        static $name: ::sys::weak::Weak<unsafe extern fn($($t),*) -> $ret> =
+            ::sys::weak::Weak {
+                name: concat!(stringify!($name), "\0"),
+                addr: ::std::sync::atomic::ATOMIC_USIZE_INIT,
+                _marker: ::std::marker::PhantomData,
+            };
+    )
+}
+
+pub struct Weak<F> {
+    pub name: &'static str,
+    pub addr: AtomicUsize,
+    pub _marker: marker::PhantomData<F>,
+}
+
+impl<F> Weak<F> {
+    pub fn get(&self) -> Option<&F> {
+        assert_eq!(mem::size_of::<F>(), mem::size_of::<usize>());
+        unsafe {
+            if self.addr.load(Ordering::SeqCst) == 0 {
+                let ptr = match fetch(self.name) {
+                    0 => 1,
+                    n => n,
+                };
+                self.addr.store(ptr, Ordering::SeqCst);
+            }
+            if self.addr.load(Ordering::SeqCst) == 1 {
+                None
+            } else {
+                mem::transmute::<&AtomicUsize, Option<&F>>(&self.addr)
+            }
+        }
+    }
+}
+
+unsafe fn fetch(name: &str) -> usize {
+    let name = name.as_bytes();
+    assert_eq!(name[name.len() - 1], 0);
+    libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr() as *const _) as usize
+}
+
diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/windows.rs b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/windows.rs
new file mode 100644
index 000000000..21038e3fe
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/sys/windows.rs
@@ -0,0 +1,913 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::cmp;
+use std::fmt;
+use std::io::{Read, Write};
+use std::io;
+use std::mem;
+use std::net::Shutdown;
+use std::net::{self, Ipv4Addr, Ipv6Addr};
+use std::os::windows::prelude::*;
+use std::ptr;
+use std::sync::{Once, ONCE_INIT};
+use std::time::Duration;
+
+use kernel32;
+use winapi::*;
+use ws2_32;
+
+use SockAddr;
+
+const HANDLE_FLAG_INHERIT: DWORD = 0x00000001;
+const MSG_PEEK: c_int = 0x2;
+const SD_BOTH: c_int = 2;
+const SD_RECEIVE: c_int = 0;
+const SD_SEND: c_int = 1;
+const SIO_KEEPALIVE_VALS: DWORD = 0x98000004;
+const WSA_FLAG_OVERLAPPED: DWORD = 0x01;
+
+#[repr(C)]
+struct tcp_keepalive {
+    onoff: c_ulong,
+    keepalivetime: c_ulong,
+    keepaliveinterval: c_ulong,
+}
+
+fn init() {
+    static INIT: Once = ONCE_INIT;
+
+    INIT.call_once(|| {
+        // Initialize winsock through the standard library by just creating a
+        // dummy socket. Whether this is successful or not we drop the result as
+        // libstd will be sure to have initialized winsock.
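
The `weak!` macro and `Weak<F>` struct in `weak.rs` above implement optional-symbol lookup: `dlsym(3)` probes the running libc for a function such as `accept4` exactly once, the resulting address is cached in an `AtomicUsize` (with `1` reserved as the "not found" sentinel), and callers fall back to plain `accept` plus `FD_CLOEXEC` when the probe fails. Here is a minimal standalone sketch of that probe, separate from the vendored sources and assuming only the `libc` crate as a dependency; the name `lookup_accept4` is ours, not socket2's:

```rust
use std::mem;

// The signature of accept4(2), which older kernels/libcs lack.
type Accept4 = unsafe extern "C" fn(
    libc::c_int,
    *mut libc::sockaddr,
    *mut libc::socklen_t,
    libc::c_int,
) -> libc::c_int;

fn lookup_accept4() -> Option<Accept4> {
    unsafe {
        // dlsym returns a null pointer when the symbol is absent.
        let sym = libc::dlsym(libc::RTLD_DEFAULT, b"accept4\0".as_ptr() as *const _);
        if sym.is_null() {
            None
        } else {
            // A data pointer and a function pointer are the same size here,
            // mirroring the transmute that `Weak::get` performs on its cache.
            Some(mem::transmute::<*mut libc::c_void, Accept4>(sym))
        }
    }
}

fn main() {
    match lookup_accept4() {
        Some(_) => println!("accept4 available: SOCK_CLOEXEC can be set atomically"),
        None => println!("accept4 missing: fall back to accept + set_cloexec"),
    }
}
```

socket2 additionally caches the negative result (the `0 => 1` mapping in `Weak::get`), so a missing symbol is probed at most once per process.
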
+ let _ = net::UdpSocket::bind("127.0.0.1:34254"); + }); +} + +fn last_error() -> io::Error { + io::Error::from_raw_os_error(unsafe { ws2_32::WSAGetLastError() }) +} + +pub struct Socket { + socket: SOCKET, +} + +impl Socket { + pub fn new(family: c_int, ty: c_int, protocol: c_int) -> io::Result { + init(); + unsafe { + let socket = match ws2_32::WSASocketW(family, + ty, + protocol, + ptr::null_mut(), + 0, + WSA_FLAG_OVERLAPPED) { + INVALID_SOCKET => return Err(last_error()), + socket => socket, + }; + let socket = Socket::from_raw_socket(socket); + socket.set_no_inherit()?; + Ok(socket) + } + } + + pub fn bind(&self, addr: &SockAddr) -> io::Result<()> { + unsafe { + if ws2_32::bind(self.socket, addr.as_ptr(), addr.len()) == 0 { + Ok(()) + } else { + Err(last_error()) + } + } + } + + pub fn listen(&self, backlog: i32) -> io::Result<()> { + unsafe { + if ws2_32::listen(self.socket, backlog) == 0 { + Ok(()) + } else { + Err(last_error()) + } + } + } + + pub fn connect(&self, addr: &SockAddr) -> io::Result<()> { + unsafe { + if ws2_32::connect(self.socket, addr.as_ptr(), addr.len()) == 0 { + Ok(()) + } else { + Err(last_error()) + } + } + } + + pub fn connect_timeout(&self, addr: &SockAddr, timeout: Duration) -> io::Result<()> { + self.set_nonblocking(true)?; + let r = self.connect(addr); + self.set_nonblocking(true)?; + + match r { + Ok(()) => return Ok(()), + Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {} + Err(e) => return Err(e), + } + + if timeout.as_secs() == 0 && timeout.subsec_nanos() == 0 { + return Err(io::Error::new(io::ErrorKind::InvalidInput, + "cannot set a 0 duration timeout")); + } + + let mut timeout = timeval { + tv_sec: timeout.as_secs() as c_long, + tv_usec: (timeout.subsec_nanos() / 1000) as c_long, + }; + if timeout.tv_sec == 0 && timeout.tv_usec == 0 { + timeout.tv_usec = 1; + } + + let fds = unsafe { + let mut fds = mem::zeroed::(); + fds.fd_count = 1; + fds.fd_array[0] = self.socket; + fds + }; + + let mut writefds = fds; + let mut errorfds = fds; + + match unsafe { ws2_32::select(1, ptr::null_mut(), &mut writefds, &mut errorfds, &timeout) } { + SOCKET_ERROR => return Err(io::Error::last_os_error()), + 0 => return Err(io::Error::new(io::ErrorKind::TimedOut, "connection timed out")), + _ => { + if writefds.fd_count != 1 { + if let Some(e) = self.take_error()? 
{ + return Err(e); + } + } + Ok(()) + } + } + } + + pub fn local_addr(&self) -> io::Result { + unsafe { + let mut storage: SOCKADDR_STORAGE = mem::zeroed(); + let mut len = mem::size_of_val(&storage) as c_int; + if ws2_32::getsockname(self.socket, + &mut storage as *mut _ as *mut _, + &mut len) != 0 { + return Err(last_error()) + } + Ok(SockAddr::from_raw_parts(&storage as *const _ as *const _, len)) + } + } + + pub fn peer_addr(&self) -> io::Result { + unsafe { + let mut storage: SOCKADDR_STORAGE = mem::zeroed(); + let mut len = mem::size_of_val(&storage) as c_int; + if ws2_32::getpeername(self.socket, + &mut storage as *mut _ as *mut _, + &mut len) != 0 { + return Err(last_error()) + } + Ok(SockAddr::from_raw_parts(&storage as *const _ as *const _, len)) + } + } + + pub fn try_clone(&self) -> io::Result { + unsafe { + let mut info: WSAPROTOCOL_INFOW = mem::zeroed(); + let r = ws2_32::WSADuplicateSocketW(self.socket, + kernel32::GetCurrentProcessId(), + &mut info); + if r != 0 { + return Err(io::Error::last_os_error()) + } + let socket = ws2_32::WSASocketW(info.iAddressFamily, + info.iSocketType, + info.iProtocol, + &mut info, + 0, + WSA_FLAG_OVERLAPPED); + let socket = match socket { + INVALID_SOCKET => return Err(last_error()), + n => Socket::from_raw_socket(n), + }; + socket.set_no_inherit()?; + Ok(socket) + } + } + + pub fn accept(&self) -> io::Result<(Socket, SockAddr)> { + unsafe { + let mut storage: SOCKADDR_STORAGE = mem::zeroed(); + let mut len = mem::size_of_val(&storage) as c_int; + let socket = { + ws2_32::accept(self.socket, + &mut storage as *mut _ as *mut _, + &mut len) + }; + let socket = match socket { + INVALID_SOCKET => return Err(last_error()), + socket => Socket::from_raw_socket(socket), + }; + socket.set_no_inherit()?; + let addr = SockAddr::from_raw_parts(&storage as *const _ as *const _, len); + Ok((socket, addr)) + } + } + + pub fn take_error(&self) -> io::Result> { + unsafe { + let raw: c_int = self.getsockopt(SOL_SOCKET, SO_ERROR)?; + if raw == 0 { + Ok(None) + } else { + Ok(Some(io::Error::from_raw_os_error(raw as i32))) + } + } + } + + pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { + unsafe { + let mut nonblocking = nonblocking as c_ulong; + let r = ws2_32::ioctlsocket(self.socket, + FIONBIO as c_int, + &mut nonblocking); + if r == 0 { + Ok(()) + } else { + Err(io::Error::last_os_error()) + } + } + } + + pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { + + let how = match how { + Shutdown::Write => SD_SEND, + Shutdown::Read => SD_RECEIVE, + Shutdown::Both => SD_BOTH, + }; + if unsafe { ws2_32::shutdown(self.socket, how) == 0 } { + Ok(()) + } else { + Err(last_error()) + } + } + + pub fn recv(&self, buf: &mut [u8]) -> io::Result { + unsafe { + let n = { + ws2_32::recv(self.socket, + buf.as_mut_ptr() as *mut c_char, + clamp(buf.len()), + 0) + }; + match n { + SOCKET_ERROR if ws2_32::WSAGetLastError() == WSAESHUTDOWN as i32 => Ok(0), + SOCKET_ERROR => Err(last_error()), + n => Ok(n as usize) + } + } + } + + pub fn peek(&self, buf: &mut [u8]) -> io::Result { + unsafe { + let n = { + ws2_32::recv(self.socket, + buf.as_mut_ptr() as *mut c_char, + clamp(buf.len()), + MSG_PEEK) + }; + match n { + SOCKET_ERROR if ws2_32::WSAGetLastError() == WSAESHUTDOWN as i32 => Ok(0), + SOCKET_ERROR => Err(last_error()), + n => Ok(n as usize) + } + } + } + + pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SockAddr)> { + self.recvfrom(buf, 0) + } + + pub fn peek_from(&self, buf: &mut [u8]) -> io::Result<(usize, SockAddr)> { + 
self.recvfrom(buf, MSG_PEEK) + } + + fn recvfrom(&self, buf: &mut [u8], flags: c_int) + -> io::Result<(usize, SockAddr)> { + unsafe { + let mut storage: SOCKADDR_STORAGE = mem::zeroed(); + let mut addrlen = mem::size_of_val(&storage) as c_int; + + let n = { + ws2_32::recvfrom(self.socket, + buf.as_mut_ptr() as *mut c_char, + clamp(buf.len()), + flags, + &mut storage as *mut _ as *mut _, + &mut addrlen) + }; + let n = match n { + SOCKET_ERROR if ws2_32::WSAGetLastError() == WSAESHUTDOWN as i32 => 0, + SOCKET_ERROR => return Err(last_error()), + n => n as usize, + }; + let addr = SockAddr::from_raw_parts(&storage as *const _ as *const _, addrlen); + Ok((n, addr)) + } + } + + pub fn send(&self, buf: &[u8]) -> io::Result { + unsafe { + let n = { + ws2_32::send(self.socket, + buf.as_ptr() as *const c_char, + clamp(buf.len()), + 0) + }; + if n == SOCKET_ERROR { + Err(last_error()) + } else { + Ok(n as usize) + } + } + } + + pub fn send_to(&self, buf: &[u8], addr: &SockAddr) -> io::Result { + unsafe { + let n = { + ws2_32::sendto(self.socket, + buf.as_ptr() as *const c_char, + clamp(buf.len()), + 0, + addr.as_ptr(), + addr.len()) + }; + if n == SOCKET_ERROR { + Err(last_error()) + } else { + Ok(n as usize) + } + } + } + + // ================================================ + + pub fn ttl(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(IPPROTO_IP, IP_TTL)?; + Ok(raw as u32) + } + } + + pub fn set_ttl(&self, ttl: u32) -> io::Result<()> { + unsafe { + self.setsockopt(IPPROTO_IP, IP_TTL, ttl as c_int) + } + } + + pub fn only_v6(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(IPPROTO_IPV6.0 as c_int, + IPV6_V6ONLY)?; + Ok(raw != 0) + } + } + + pub fn set_only_v6(&self, only_v6: bool) -> io::Result<()> { + unsafe { + self.setsockopt(IPPROTO_IPV6.0 as c_int, + IPV6_V6ONLY, + only_v6 as c_int) + } + } + + pub fn read_timeout(&self) -> io::Result> { + unsafe { + Ok(ms2dur(self.getsockopt(SOL_SOCKET, SO_RCVTIMEO)?)) + } + } + + pub fn set_read_timeout(&self, dur: Option) -> io::Result<()> { + unsafe { + self.setsockopt(SOL_SOCKET, SO_RCVTIMEO, dur2ms(dur)?) + } + } + + pub fn write_timeout(&self) -> io::Result> { + unsafe { + Ok(ms2dur(self.getsockopt(SOL_SOCKET, SO_SNDTIMEO)?)) + } + } + + pub fn set_write_timeout(&self, dur: Option) -> io::Result<()> { + unsafe { + self.setsockopt(SOL_SOCKET, SO_SNDTIMEO, dur2ms(dur)?) 
+ } + } + + pub fn nodelay(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(IPPROTO_TCP.0 as c_int, + TCP_NODELAY)?; + Ok(raw != 0) + } + } + + pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> { + unsafe { + self.setsockopt(IPPROTO_TCP.0 as c_int, + TCP_NODELAY, + nodelay as c_int) + } + } + + pub fn broadcast(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(SOL_SOCKET, SO_BROADCAST)?; + Ok(raw != 0) + } + } + + pub fn set_broadcast(&self, broadcast: bool) -> io::Result<()> { + unsafe { + self.setsockopt(SOL_SOCKET, SO_BROADCAST, broadcast as c_int) + } + } + + pub fn multicast_loop_v4(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(IPPROTO_IP, IP_MULTICAST_LOOP)?; + Ok(raw != 0) + } + } + + pub fn set_multicast_loop_v4(&self, multicast_loop_v4: bool) -> io::Result<()> { + unsafe { + self.setsockopt(IPPROTO_IP, + IP_MULTICAST_LOOP, + multicast_loop_v4 as c_int) + } + } + + pub fn multicast_ttl_v4(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(IPPROTO_IP, IP_MULTICAST_TTL)?; + Ok(raw as u32) + } + } + + pub fn set_multicast_ttl_v4(&self, multicast_ttl_v4: u32) -> io::Result<()> { + unsafe { + self.setsockopt(IPPROTO_IP, + IP_MULTICAST_TTL, + multicast_ttl_v4 as c_int) + } + } + + pub fn multicast_loop_v6(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(IPPROTO_IPV6.0 as c_int, + IPV6_MULTICAST_LOOP)?; + Ok(raw != 0) + } + } + + pub fn set_multicast_loop_v6(&self, multicast_loop_v6: bool) -> io::Result<()> { + unsafe { + self.setsockopt(IPPROTO_IPV6.0 as c_int, + IPV6_MULTICAST_LOOP, + multicast_loop_v6 as c_int) + } + } + + pub fn join_multicast_v4(&self, + multiaddr: &Ipv4Addr, + interface: &Ipv4Addr) -> io::Result<()> { + let multiaddr = to_s_addr(multiaddr); + let interface = to_s_addr(interface); + let mreq = ip_mreq { + imr_multiaddr: in_addr { S_un: multiaddr }, + imr_interface: in_addr { S_un: interface }, + }; + unsafe { + self.setsockopt(IPPROTO_IP, IP_ADD_MEMBERSHIP, mreq) + } + } + + pub fn join_multicast_v6(&self, + multiaddr: &Ipv6Addr, + interface: u32) -> io::Result<()> { + let multiaddr = to_in6_addr(multiaddr); + let mreq = ipv6_mreq { + ipv6mr_multiaddr: multiaddr, + ipv6mr_interface: interface, + }; + unsafe { + self.setsockopt(IPPROTO_IP, IPV6_ADD_MEMBERSHIP, mreq) + } + } + + pub fn leave_multicast_v4(&self, + multiaddr: &Ipv4Addr, + interface: &Ipv4Addr) -> io::Result<()> { + let multiaddr = to_s_addr(multiaddr); + let interface = to_s_addr(interface); + let mreq = ip_mreq { + imr_multiaddr: in_addr { S_un: multiaddr }, + imr_interface: in_addr { S_un: interface }, + }; + unsafe { + self.setsockopt(IPPROTO_IP, IP_DROP_MEMBERSHIP, mreq) + } + } + + pub fn leave_multicast_v6(&self, + multiaddr: &Ipv6Addr, + interface: u32) -> io::Result<()> { + let multiaddr = to_in6_addr(multiaddr); + let mreq = ipv6_mreq { + ipv6mr_multiaddr: multiaddr, + ipv6mr_interface: interface, + }; + unsafe { + self.setsockopt(IPPROTO_IP, IPV6_DROP_MEMBERSHIP, mreq) + } + } + + pub fn linger(&self) -> io::Result> { + unsafe { + Ok(linger2dur(self.getsockopt(SOL_SOCKET, SO_LINGER)?)) + } + } + + pub fn set_linger(&self, dur: Option) -> io::Result<()> { + unsafe { + self.setsockopt(SOL_SOCKET, SO_LINGER, dur2linger(dur)) + } + } + + pub fn set_reuse_address(&self, reuse: bool) -> io::Result<()> { + unsafe { + self.setsockopt(SOL_SOCKET, SO_REUSEADDR, reuse as c_int) + } + } + + pub fn reuse_address(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(SOL_SOCKET, 
SO_REUSEADDR)?; + Ok(raw != 0) + } + } + + pub fn recv_buffer_size(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(SOL_SOCKET, SO_RCVBUF)?; + Ok(raw as usize) + } + } + + pub fn set_recv_buffer_size(&self, size: usize) -> io::Result<()> { + unsafe { + // TODO: casting usize to a c_int should be a checked cast + self.setsockopt(SOL_SOCKET, SO_RCVBUF, size as c_int) + } + } + + pub fn send_buffer_size(&self) -> io::Result { + unsafe { + let raw: c_int = self.getsockopt(SOL_SOCKET, SO_SNDBUF)?; + Ok(raw as usize) + } + } + + pub fn set_send_buffer_size(&self, size: usize) -> io::Result<()> { + unsafe { + // TODO: casting usize to a c_int should be a checked cast + self.setsockopt(SOL_SOCKET, SO_SNDBUF, size as c_int) + } + } + + pub fn keepalive(&self) -> io::Result> { + let mut ka = tcp_keepalive { + onoff: 0, + keepalivetime: 0, + keepaliveinterval: 0, + }; + let n = unsafe { + ws2_32::WSAIoctl(self.socket, + SIO_KEEPALIVE_VALS, + 0 as *mut _, + 0, + &mut ka as *mut _ as *mut _, + mem::size_of_val(&ka) as DWORD, + 0 as *mut _, + 0 as *mut _, + None) + }; + if n == 0 { + Ok(if ka.onoff == 0 { + None + } else if ka.keepaliveinterval == 0 { + None + } else { + let seconds = ka.keepaliveinterval / 1000; + let nanos = (ka.keepaliveinterval % 1000) * 1_000_000; + Some(Duration::new(seconds as u64, nanos as u32)) + }) + } else { + Err(last_error()) + } + } + + pub fn set_keepalive(&self, keepalive: Option) -> io::Result<()> { + let ms = dur2ms(keepalive)?; + // TODO: checked casts here + let ka = tcp_keepalive { + onoff: keepalive.is_some() as c_ulong, + keepalivetime: ms as c_ulong, + keepaliveinterval: ms as c_ulong, + }; + let n = unsafe { + ws2_32::WSAIoctl(self.socket, + SIO_KEEPALIVE_VALS, + &ka as *const _ as *mut _, + mem::size_of_val(&ka) as DWORD, + 0 as *mut _, + 0, + 0 as *mut _, + 0 as *mut _, + None) + }; + if n == 0 { + Ok(()) + } else { + Err(last_error()) + } + } + + unsafe fn setsockopt(&self, + opt: c_int, + val: c_int, + payload: T) -> io::Result<()> + where T: Copy, + { + let payload = &payload as *const T as *const c_char; + if ws2_32::setsockopt(self.socket, + opt, + val, + payload, + mem::size_of::() as c_int) == 0 { + Ok(()) + } else { + Err(last_error()) + } + } + + unsafe fn getsockopt(&self, opt: c_int, val: c_int) -> io::Result { + let mut slot: T = mem::zeroed(); + let mut len = mem::size_of::() as c_int; + if ws2_32::getsockopt(self.socket, + opt, + val, + &mut slot as *mut _ as *mut _, + &mut len) == 0 { + assert_eq!(len as usize, mem::size_of::()); + Ok(slot) + } else { + Err(last_error()) + } + } + + fn set_no_inherit(&self) -> io::Result<()> { + unsafe { + let r = kernel32::SetHandleInformation(self.socket as HANDLE, + HANDLE_FLAG_INHERIT, + 0); + if r == 0 { + Err(io::Error::last_os_error()) + } else { + Ok(()) + } + } + } +} + +impl Read for Socket { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + <&Socket>::read(&mut &*self, buf) + } +} + +impl<'a> Read for &'a Socket { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.recv(buf) + } +} + +impl Write for Socket { + fn write(&mut self, buf: &[u8]) -> io::Result { + <&Socket>::write(&mut &*self, buf) + } + + fn flush(&mut self) -> io::Result<()> { + <&Socket>::flush(&mut &*self) + } +} + +impl<'a> Write for &'a Socket { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.send(buf) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +impl fmt::Debug for Socket { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut f = f.debug_struct("Socket"); 
+ f.field("socket", &self.socket); + if let Ok(addr) = self.local_addr() { + f.field("local_addr", &addr); + } + if let Ok(addr) = self.peer_addr() { + f.field("peer_addr", &addr); + } + f.finish() + } +} + +impl AsRawSocket for Socket { + fn as_raw_socket(&self) -> SOCKET { + self.socket + } +} + +impl IntoRawSocket for Socket { + fn into_raw_socket(self) -> SOCKET { + let socket = self.socket; + mem::forget(self); + return socket + } +} + +impl FromRawSocket for Socket { + unsafe fn from_raw_socket(socket: SOCKET) -> Socket { + Socket { socket: socket } + } +} + +impl AsRawSocket for ::Socket { + fn as_raw_socket(&self) -> SOCKET { + self.inner.as_raw_socket() + } +} + +impl IntoRawSocket for ::Socket { + fn into_raw_socket(self) -> SOCKET { + self.inner.into_raw_socket() + } +} + +impl FromRawSocket for ::Socket { + unsafe fn from_raw_socket(socket: SOCKET) -> ::Socket { + ::Socket { inner: Socket::from_raw_socket(socket) } + } +} + +impl Drop for Socket { + fn drop(&mut self) { + unsafe { + let _ = ws2_32::closesocket(self.socket); + } + } +} + +impl From for net::TcpStream { + fn from(socket: Socket) -> net::TcpStream { + unsafe { net::TcpStream::from_raw_socket(socket.into_raw_socket()) } + } +} + +impl From for net::TcpListener { + fn from(socket: Socket) -> net::TcpListener { + unsafe { net::TcpListener::from_raw_socket(socket.into_raw_socket()) } + } +} + +impl From for net::UdpSocket { + fn from(socket: Socket) -> net::UdpSocket { + unsafe { net::UdpSocket::from_raw_socket(socket.into_raw_socket()) } + } +} + +impl From for Socket { + fn from(socket: net::TcpStream) -> Socket { + unsafe { Socket::from_raw_socket(socket.into_raw_socket()) } + } +} + +impl From for Socket { + fn from(socket: net::TcpListener) -> Socket { + unsafe { Socket::from_raw_socket(socket.into_raw_socket()) } + } +} + +impl From for Socket { + fn from(socket: net::UdpSocket) -> Socket { + unsafe { Socket::from_raw_socket(socket.into_raw_socket()) } + } +} + +fn clamp(input: usize) -> c_int { + cmp::min(input, ::max_value() as usize) as c_int +} + +fn dur2ms(dur: Option) -> io::Result { + match dur { + Some(dur) => { + // Note that a duration is a (u64, u32) (seconds, nanoseconds) + // pair, and the timeouts in windows APIs are typically u32 + // milliseconds. To translate, we have two pieces to take care of: + // + // * Nanosecond precision is rounded up + // * Greater than u32::MAX milliseconds (50 days) is rounded up to + // INFINITE (never time out). 
+ let ms = dur.as_secs().checked_mul(1000).and_then(|ms| { + ms.checked_add((dur.subsec_nanos() as u64) / 1_000_000) + }).and_then(|ms| { + ms.checked_add(if dur.subsec_nanos() % 1_000_000 > 0 {1} else {0}) + }).map(|ms| { + if ms > ::max_value() as u64 { + INFINITE + } else { + ms as DWORD + } + }).unwrap_or(INFINITE); + if ms == 0 { + return Err(io::Error::new(io::ErrorKind::InvalidInput, + "cannot set a 0 duration timeout")); + } + Ok(ms) + } + None => Ok(0), + } +} + +fn ms2dur(raw: DWORD) -> Option { + if raw == 0 { + None + } else { + let secs = raw / 1000; + let nsec = (raw % 1000) * 1000000; + Some(Duration::new(secs as u64, nsec as u32)) + } +} + +fn to_s_addr(addr: &Ipv4Addr) -> ULONG { + let octets = addr.octets(); + ::hton(((octets[0] as ULONG) << 24) | + ((octets[1] as ULONG) << 16) | + ((octets[2] as ULONG) << 8) | + ((octets[3] as ULONG) << 0)) +} + +fn to_in6_addr(addr: &Ipv6Addr) -> in6_addr { + let mut ret: in6_addr = unsafe { mem::zeroed() }; + ret.s6_addr = addr.octets(); + return ret +} + +fn linger2dur(linger_opt: linger) -> Option { + if linger_opt.l_onoff == 0 { + None + } else { + Some(Duration::from_secs(linger_opt.l_linger as u64)) + } +} + +fn dur2linger(dur: Option) -> linger { + match dur { + Some(d) => { + linger { + l_onoff: 1, + l_linger: d.as_secs() as u16, + } + } + None => linger { l_onoff: 0, l_linger: 0 }, + } +} diff --git a/collector/compile-benchmarks/cargo/socket2-0.2.3/src/utils.rs b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/utils.rs new file mode 100644 index 000000000..21fe9d61f --- /dev/null +++ b/collector/compile-benchmarks/cargo/socket2-0.2.3/src/utils.rs @@ -0,0 +1,51 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +#[doc(hidden)] +pub trait NetInt { + fn from_be(i: Self) -> Self; + fn to_be(&self) -> Self; +} +macro_rules! doit { + ($($t:ident)*) => ($(impl NetInt for $t { + fn from_be(i: Self) -> Self { <$t>::from_be(i) } + fn to_be(&self) -> Self { <$t>::to_be(*self) } + })*) +} +doit! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize } + +#[doc(hidden)] +pub trait One { + fn one() -> Self; +} + +macro_rules! one { + ($($t:ident)*) => ($( + impl One for $t { fn one() -> $t { 1 } } + )*) +} + +one! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize } + + +#[doc(hidden)] +pub trait Zero { + fn zero() -> Self; +} + +macro_rules! zero { + ($($t:ident)*) => ($( + impl Zero for $t { fn zero() -> $t { 0 } } + )*) +} + +zero! 
{ i8 i16 i32 i64 isize u8 u16 u32 u64 usize }
+
diff --git a/collector/compile-benchmarks/cargo/src/bin/bench.rs b/collector/compile-benchmarks/cargo/src/bin/bench.rs
new file mode 100644
index 000000000..1aa82dd9f
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/bench.rs
@@ -0,0 +1,153 @@
+use std::env;
+
+use cargo::core::Workspace;
+use cargo::ops::{self, MessageFormat, Packages};
+use cargo::util::{CliResult, CliError, Config, CargoErrorKind};
+use cargo::util::important_paths::{find_root_manifest_for_wd};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_no_run: bool,
+    flag_package: Vec<String>,
+    flag_jobs: Option<u32>,
+    flag_features: Vec<String>,
+    flag_all_features: bool,
+    flag_no_default_features: bool,
+    flag_target: Option<String>,
+    flag_manifest_path: Option<String>,
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_message_format: MessageFormat,
+    flag_lib: bool,
+    flag_bin: Vec<String>,
+    flag_bins: bool,
+    flag_example: Vec<String>,
+    flag_examples: bool,
+    flag_test: Vec<String>,
+    flag_tests: bool,
+    flag_bench: Vec<String>,
+    flag_benches: bool,
+    flag_all_targets: bool,
+    flag_no_fail_fast: bool,
+    flag_frozen: bool,
+    flag_locked: bool,
+    arg_args: Vec<String>,
+    flag_all: bool,
+    flag_exclude: Vec<String>,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Execute all benchmarks of a local package
+
+Usage:
+    cargo bench [options] [--] [<args>...]
+
+Options:
+    -h, --help                   Print this message
+    --lib                        Benchmark only this package's library
+    --bin NAME ...               Benchmark only the specified binary
+    --bins                       Benchmark all binaries
+    --example NAME ...           Benchmark only the specified example
+    --examples                   Benchmark all examples
+    --test NAME ...              Benchmark only the specified test target
+    --tests                      Benchmark all tests
+    --bench NAME ...             Benchmark only the specified bench target
+    --benches                    Benchmark all benches
+    --all-targets                Benchmark all targets (default)
+    --no-run                     Compile, but don't run benchmarks
+    -p SPEC, --package SPEC ...  Package to run benchmarks for
+    --all                        Benchmark all packages in the workspace
+    --exclude SPEC ...           Exclude packages from the benchmark
+    -j N, --jobs N               Number of parallel jobs, defaults to # of CPUs
+    --features FEATURES          Space-separated list of features to also build
+    --all-features               Build all available features
+    --no-default-features        Do not build the `default` feature
+    --target TRIPLE              Build for the target triple
+    --manifest-path PATH         Path to the manifest to build benchmarks for
+    -v, --verbose ...            Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet                  No output printed to stdout
+    --color WHEN                 Coloring: auto, always, never
+    --message-format FMT         Error format: human, json [default: human]
+    --no-fail-fast               Run all benchmarks regardless of failure
+    --frozen                     Require Cargo.lock and cache are up to date
+    --locked                     Require Cargo.lock is up to date
+    -Z FLAG ...                  Unstable (nightly-only) flags to Cargo
+
+All of the trailing arguments are passed to the benchmark binaries generated
+for filtering benchmarks and generally providing options configuring how they
+run.
+
+If the --package argument is given, then SPEC is a package id specification
+which indicates which package should be benchmarked. If it is not given, then
+the current package is benchmarked. For more information on SPEC and its format,
+see the `cargo help pkgid` command.
+
+All packages in the workspace are benchmarked if the `--all` flag is supplied. The
+`--all` flag is automatically assumed for a virtual manifest.
+Note that `--exclude` has to be specified in conjunction with the `--all` flag.
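
The `--all`, `--exclude`, and `-p` interplay spelled out in this usage text is resolved by `Packages::from_flags` in the code further down. A hypothetical standalone sketch of that validation (the enum shape and the error message are guessed for illustration, not copied from cargo's source):

```rust
#[derive(Debug)]
enum Packages<'a> {
    All,                      // --all with no --exclude
    OptOut(&'a [String]),     // --all --exclude SPEC...
    Packages(&'a [String]),   // -p SPEC... (empty means "the current package")
}

fn packages_from_flags<'a>(
    virtual_ws: bool,
    all: bool,
    exclude: &'a [String],
    package: &'a [String],
) -> Result<Packages<'a>, String> {
    // A virtual manifest has no "current package", so it implies --all.
    let all = all || virtual_ws;
    if !exclude.is_empty() && !all {
        return Err("--exclude can only be used together with --all".to_string());
    }
    Ok(if all && exclude.is_empty() {
        Packages::All
    } else if all {
        Packages::OptOut(exclude)
    } else {
        Packages::Packages(package)
    })
}

fn main() {
    let excl = vec!["slow-crate".to_string()];
    // --all --exclude slow-crate is accepted...
    println!("{:?}", packages_from_flags(false, true, &excl, &[]));
    // ...but --exclude without --all is rejected.
    println!("{:?}", packages_from_flags(false, false, &excl, &[]));
}
```
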
+ +The --jobs argument affects the building of the benchmark executable but does +not affect how many jobs are used when running the benchmarks. + +Compilation can be customized with the `bench` profile in the manifest. +"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + debug!("executing; cmd=cargo-bench; args={:?}", + env::args().collect::>()); + + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + let ws = Workspace::new(&root, config)?; + + let spec = Packages::from_flags(ws.is_virtual(), + options.flag_all, + &options.flag_exclude, + &options.flag_package)?; + + let ops = ops::TestOptions { + no_run: options.flag_no_run, + no_fail_fast: options.flag_no_fail_fast, + only_doc: false, + compile_opts: ops::CompileOptions { + config: config, + jobs: options.flag_jobs, + target: options.flag_target.as_ref().map(|s| &s[..]), + features: &options.flag_features, + all_features: options.flag_all_features, + no_default_features: options.flag_no_default_features, + spec: spec, + release: true, + mode: ops::CompileMode::Bench, + filter: ops::CompileFilter::new(options.flag_lib, + &options.flag_bin, options.flag_bins, + &options.flag_test, options.flag_tests, + &options.flag_example, options.flag_examples, + &options.flag_bench, options.flag_benches, + options.flag_all_targets), + message_format: options.flag_message_format, + target_rustdoc_args: None, + target_rustc_args: None, + }, + }; + + let err = ops::run_benches(&ws, &ops, &options.arg_args)?; + match err { + None => Ok(()), + Some(err) => { + Err(match err.exit.as_ref().and_then(|e| e.code()) { + Some(i) => CliError::new("bench failed".into(), i), + None => CliError::new(CargoErrorKind::CargoTestErrorKind(err).into(), 101) + }) + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/bin/build.rs b/collector/compile-benchmarks/cargo/src/bin/build.rs new file mode 100644 index 000000000..883e30db6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/build.rs @@ -0,0 +1,131 @@ +use std::env; + +use cargo::core::Workspace; +use cargo::ops::{self, CompileOptions, MessageFormat, Packages}; +use cargo::util::important_paths::{find_root_manifest_for_wd}; +use cargo::util::{CliResult, Config}; + +#[derive(Deserialize)] +pub struct Options { + flag_package: Vec, + flag_jobs: Option, + flag_features: Vec, + flag_all_features: bool, + flag_no_default_features: bool, + flag_target: Option, + flag_manifest_path: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_message_format: MessageFormat, + flag_release: bool, + flag_lib: bool, + flag_bin: Vec, + flag_bins: bool, + flag_example: Vec, + flag_examples: bool, + flag_test: Vec, + flag_tests: bool, + flag_bench: Vec, + flag_benches: bool, + flag_all_targets: bool, + flag_locked: bool, + flag_frozen: bool, + flag_all: bool, + flag_exclude: Vec, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Compile a local package and all of its dependencies + +Usage: + cargo build [options] + +Options: + -h, --help Print this message + -p SPEC, --package SPEC ... Package to build + --all Build all packages in the workspace + --exclude SPEC ... 
Exclude packages from the build + -j N, --jobs N Number of parallel jobs, defaults to # of CPUs + --lib Build only this package's library + --bin NAME Build only the specified binary + --bins Build all binaries + --example NAME Build only the specified example + --examples Build all examples + --test NAME Build only the specified test target + --tests Build all tests + --bench NAME Build only the specified bench target + --benches Build all benches + --all-targets Build all targets (lib and bin targets by default) + --release Build artifacts in release mode, with optimizations + --features FEATURES Space-separated list of features to also build + --all-features Build all available features + --no-default-features Do not build the `default` feature + --target TRIPLE Build for the target triple + --manifest-path PATH Path to the manifest to compile + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --message-format FMT Error format: human, json [default: human] + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo + +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be built. If it is not given, then the +current package is built. For more information on SPEC and its format, see the +`cargo help pkgid` command. + +All packages in the workspace are built if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +Compilation can be configured via the use of profiles which are configured in +the manifest. The default profile for this command is `dev`, but passing +the --release flag will use the `release` profile instead. 
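
Each of these `src/bin/*.rs` subcommands pairs a docopt usage string like the one above with a `#[derive(Deserialize)]` `Options` struct: the docopt crate parses argv against the usage text, and serde maps `--jobs` onto a `flag_jobs` field, positional `<args>` onto `arg_args`, and so on. A toy standalone version of the pattern, using the docopt crate's documented entry point (the `toy-build` command itself is invented):

```rust
extern crate docopt;
extern crate serde;
#[macro_use]
extern crate serde_derive;

use docopt::Docopt;

const USAGE: &'static str = "
Toy build command

Usage:
    toy-build [options]

Options:
    -j N, --jobs N  Number of parallel jobs
    --release       Build artifacts in release mode
";

#[derive(Deserialize)]
struct Options {
    flag_jobs: Option<u32>, // an absent --jobs deserializes to None
    flag_release: bool,     // boolean flags default to false
}

fn main() {
    let options: Options = Docopt::new(USAGE)
        .and_then(|d| d.deserialize())
        .unwrap_or_else(|e| e.exit());
    println!("jobs = {:?}, release = {}", options.flag_jobs, options.flag_release);
}
```
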
+"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + debug!("executing; cmd=cargo-build; args={:?}", + env::args().collect::>()); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + let ws = Workspace::new(&root, config)?; + + let spec = Packages::from_flags(ws.is_virtual(), + options.flag_all, + &options.flag_exclude, + &options.flag_package)?; + + let opts = CompileOptions { + config: config, + jobs: options.flag_jobs, + target: options.flag_target.as_ref().map(|t| &t[..]), + features: &options.flag_features, + all_features: options.flag_all_features, + no_default_features: options.flag_no_default_features, + spec: spec, + mode: ops::CompileMode::Build, + release: options.flag_release, + filter: ops::CompileFilter::new(options.flag_lib, + &options.flag_bin, options.flag_bins, + &options.flag_test, options.flag_tests, + &options.flag_example, options.flag_examples, + &options.flag_bench, options.flag_benches, + options.flag_all_targets), + message_format: options.flag_message_format, + target_rustdoc_args: None, + target_rustc_args: None, + }; + + ops::compile(&ws, &opts)?; + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/bin/cargo.rs b/collector/compile-benchmarks/cargo/src/bin/cargo.rs new file mode 100644 index 000000000..e92bcdbe5 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/cargo.rs @@ -0,0 +1,427 @@ +extern crate cargo; +extern crate env_logger; +extern crate git2_curl; +extern crate toml; +#[macro_use] +extern crate log; +#[macro_use] +extern crate serde_derive; +extern crate serde_json; + +use std::collections::BTreeSet; +use std::collections::HashMap; +use std::env; +use std::fs; +use std::path::{Path, PathBuf}; + +use cargo::core::shell::{Shell, Verbosity}; +use cargo::util::{self, CliResult, lev_distance, Config, CargoResult, CargoError, CargoErrorKind}; +use cargo::util::CliError; + +#[derive(Deserialize)] +pub struct Flags { + flag_list: bool, + flag_version: bool, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_explain: Option, + arg_command: String, + arg_args: Vec, + flag_locked: bool, + flag_frozen: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +const USAGE: &'static str = " +Rust's package manager + +Usage: + cargo [...] + cargo [options] + +Options: + -h, --help Display this message + -V, --version Print version info and exit + --list List installed commands + --explain CODE Run `rustc --explain CODE` + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... 
Unstable (nightly-only) flags to Cargo + +Some common cargo commands are (see all commands with --list): + build Compile the current project + check Analyze the current project and report errors, but don't build object files + clean Remove the target directory + doc Build this project's and its dependencies' documentation + new Create a new cargo project + init Create a new cargo project in an existing directory + run Build and execute src/main.rs + test Run the tests + bench Run the benchmarks + update Update dependencies listed in Cargo.lock + search Search registry for crates + publish Package and upload this project to the registry + install Install a Rust binary + uninstall Uninstall a Rust binary + +See 'cargo help ' for more information on a specific command. +"; + +fn main() { + env_logger::init().unwrap(); + + let mut config = match Config::default() { + Ok(cfg) => cfg, + Err(e) => { + let mut shell = Shell::new(); + cargo::exit_with_error(e.into(), &mut shell) + } + }; + + let result = (|| { + let args: Vec<_> = try!(env::args_os() + .map(|s| { + s.into_string().map_err(|s| { + CargoError::from(format!("invalid unicode in argument: {:?}", s)) + }) + }) + .collect()); + let rest = &args; + cargo::call_main_without_stdin(execute, &mut config, USAGE, rest, true) + })(); + + match result { + Err(e) => cargo::exit_with_error(e, &mut *config.shell()), + Ok(()) => {} + } +} + +macro_rules! each_subcommand{ + ($mac:ident) => { + $mac!(bench); + $mac!(build); + $mac!(check); + $mac!(clean); + $mac!(doc); + $mac!(fetch); + $mac!(generate_lockfile); + $mac!(git_checkout); + $mac!(help); + $mac!(init); + $mac!(install); + $mac!(locate_project); + $mac!(login); + $mac!(metadata); + $mac!(new); + $mac!(owner); + $mac!(package); + $mac!(pkgid); + $mac!(publish); + $mac!(read_manifest); + $mac!(run); + $mac!(rustc); + $mac!(rustdoc); + $mac!(search); + $mac!(test); + $mac!(uninstall); + $mac!(update); + $mac!(verify_project); + $mac!(version); + $mac!(yank); + } +} + +macro_rules! declare_mod { + ($name:ident) => ( pub mod $name; ) +} +each_subcommand!(declare_mod); + +/** + The top-level `cargo` command handles configuration and project location + because they are fundamental (and intertwined). Other commands can rely + on this top-level information. +*/ +fn execute(flags: Flags, config: &mut Config) -> CliResult { + config.configure(flags.flag_verbose, + flags.flag_quiet, + &flags.flag_color, + flags.flag_frozen, + flags.flag_locked, + &flags.flag_z)?; + + init_git_transports(config); + let _token = cargo::util::job::setup(); + + if flags.flag_version { + let version = cargo::version(); + println!("{}", version); + if flags.flag_verbose > 0 { + println!("release: {}.{}.{}", + version.major, + version.minor, + version.patch); + if let Some(ref cfg) = version.cfg_info { + if let Some(ref ci) = cfg.commit_info { + println!("commit-hash: {}", ci.commit_hash); + println!("commit-date: {}", ci.commit_date); + } + } + } + return Ok(()); + } + + if flags.flag_list { + println!("Installed Commands:"); + for command in list_commands(config) { + println!(" {}", command); + } + return Ok(()); + } + + if let Some(ref code) = flags.flag_explain { + let mut procss = config.rustc()?.process(); + procss.arg("--explain").arg(code).exec()?; + return Ok(()); + } + + let args = match &flags.arg_command[..] { + // For the commands `cargo` and `cargo help`, re-execute ourselves as + // `cargo -h` so we can go through the normal process of printing the + // help message. 
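
The `each_subcommand!` macro above is higher-order: it owns the single authoritative list of builtin subcommands and applies whatever macro it is handed, so `declare_mod!` turns the list into `pub mod` items while `cmd!` (in `try_execute_builtin_command` below) turns the same list into dispatch checks. A cut-down standalone sketch of the pattern with a three-entry list:

```rust
// One macro owns the subcommand list...
macro_rules! each_subcommand {
    ($mac:ident) => {
        $mac!(bench);
        $mac!(build);
        $mac!(test);
    }
}

fn main() {
    let wanted = std::env::args().nth(1).unwrap_or_default();
    let mut matched = false;

    // ...and another macro is stamped out once per entry to do the dispatch.
    macro_rules! dispatch {
        ($name:ident) => {
            if wanted == stringify!($name) {
                println!("running builtin `{}`", stringify!($name));
                matched = true;
            }
        }
    }
    each_subcommand!(dispatch);

    if !matched {
        println!("no such subcommand: `{}`", wanted);
    }
}
```

Keeping the list in one macro means adding a subcommand touches exactly one place, and every consumer (module declarations, dispatch, the `--list` output) stays in sync.
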
+ "" | "help" if flags.arg_args.is_empty() => { + config.shell().set_verbosity(Verbosity::Verbose); + let args = &["cargo".to_string(), "-h".to_string()]; + return cargo::call_main_without_stdin(execute, config, USAGE, args, false); + } + + // For `cargo help -h` and `cargo help --help`, print out the help + // message for `cargo help` + "help" if flags.arg_args[0] == "-h" || flags.arg_args[0] == "--help" => { + vec!["cargo".to_string(), "help".to_string(), "-h".to_string()] + } + + // For `cargo help foo`, print out the usage message for the specified + // subcommand by executing the command with the `-h` flag. + "help" => vec!["cargo".to_string(), flags.arg_args[0].clone(), "-h".to_string()], + + // For all other invocations, we're of the form `cargo foo args...`. We + // use the exact environment arguments to preserve tokens like `--` for + // example. + _ => { + let mut default_alias = HashMap::new(); + default_alias.insert("b", "build".to_string()); + default_alias.insert("t", "test".to_string()); + default_alias.insert("r", "run".to_string()); + let mut args: Vec = env::args().collect(); + if let Some(new_command) = default_alias.get(&args[1][..]) { + args[1] = new_command.clone(); + } + args + } + }; + + if let Some(r) = try_execute_builtin_command(config, &args) { + return r; + } + + let alias_list = aliased_command(config, &args[1])?; + let args = match alias_list { + Some(alias_command) => { + let chain = args.iter() + .take(1) + .chain(alias_command.iter()) + .chain(args.iter().skip(2)) + .map(|s| s.to_string()) + .collect::>(); + if let Some(r) = try_execute_builtin_command(config, &chain) { + return r; + } else { + chain + } + } + None => args, + }; + + execute_external_subcommand(config, &args[1], &args) +} + +fn try_execute_builtin_command(config: &mut Config, args: &[String]) -> Option { + macro_rules! cmd { + ($name:ident) => (if args[1] == stringify!($name).replace("_", "-") { + config.shell().set_verbosity(Verbosity::Verbose); + let r = cargo::call_main_without_stdin($name::execute, + config, + $name::USAGE, + &args, + false); + return Some(r); + }) + } + each_subcommand!(cmd); + + None +} + +fn aliased_command(config: &Config, command: &str) -> CargoResult>> { + let alias_name = format!("alias.{}", command); + let mut result = Ok(None); + match config.get_string(&alias_name) { + Ok(value) => { + if let Some(record) = value { + let alias_commands = record.val + .split_whitespace() + .map(|s| s.to_string()) + .collect(); + result = Ok(Some(alias_commands)); + } + } + Err(_) => { + let value = config.get_list(&alias_name)?; + if let Some(record) = value { + let alias_commands: Vec = record.val + .iter() + .map(|s| s.0.to_string()) + .collect(); + result = Ok(Some(alias_commands)); + } + } + } + result +} + +fn find_closest(config: &Config, cmd: &str) -> Option { + let cmds = list_commands(config); + // Only consider candidates with a lev_distance of 3 or less so we don't + // suggest out-of-the-blue options. 
+ let mut filtered = cmds.iter() + .map(|c| (lev_distance(c, cmd), c)) + .filter(|&(d, _)| d < 4) + .collect::>(); + filtered.sort_by(|a, b| a.0.cmp(&b.0)); + filtered.get(0).map(|slot| slot.1.clone()) +} + +fn execute_external_subcommand(config: &Config, cmd: &str, args: &[String]) -> CliResult { + let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX); + let path = search_directories(config) + .iter() + .map(|dir| dir.join(&command_exe)) + .find(|file| is_executable(file)); + let command = match path { + Some(command) => command, + None => { + return Err(CargoError::from(match find_closest(config, cmd) { + Some(closest) => { + format!("no such subcommand: `{}`\n\n\tDid you mean `{}`?\n", + cmd, + closest) + } + None => format!("no such subcommand: `{}`", cmd), + }) + .into()) + } + }; + + let cargo_exe = config.cargo_exe()?; + let err = match util::process(&command) + .env(cargo::CARGO_ENV, cargo_exe) + .args(&args[1..]) + .exec_replace() { + Ok(()) => return Ok(()), + Err(e) => e, + }; + + if let &CargoErrorKind::ProcessErrorKind(ref perr) = err.kind() { + if let Some(code) = perr.exit.as_ref().and_then(|c| c.code()) { + return Err(CliError::code(code)); + } + } + Err(CliError::new(err, 101)) +} + +/// List all runnable commands +fn list_commands(config: &Config) -> BTreeSet { + let prefix = "cargo-"; + let suffix = env::consts::EXE_SUFFIX; + let mut commands = BTreeSet::new(); + for dir in search_directories(config) { + let entries = match fs::read_dir(dir) { + Ok(entries) => entries, + _ => continue, + }; + for entry in entries.filter_map(|e| e.ok()) { + let path = entry.path(); + let filename = match path.file_name().and_then(|s| s.to_str()) { + Some(filename) => filename, + _ => continue, + }; + if !filename.starts_with(prefix) || !filename.ends_with(suffix) { + continue; + } + if is_executable(entry.path()) { + let end = filename.len() - suffix.len(); + commands.insert(filename[prefix.len()..end].to_string()); + } + } + } + + macro_rules! add_cmd { + ($cmd:ident) => ({ commands.insert(stringify!($cmd).replace("_", "-")); }) + } + each_subcommand!(add_cmd); + commands +} + +#[cfg(unix)] +fn is_executable>(path: P) -> bool { + use std::os::unix::prelude::*; + fs::metadata(path) + .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0) + .unwrap_or(false) +} +#[cfg(windows)] +fn is_executable>(path: P) -> bool { + fs::metadata(path).map(|metadata| metadata.is_file()).unwrap_or(false) +} + +fn search_directories(config: &Config) -> Vec { + let mut dirs = vec![config.home().clone().into_path_unlocked().join("bin")]; + if let Some(val) = env::var_os("PATH") { + dirs.extend(env::split_paths(&val)); + } + dirs +} + +fn init_git_transports(config: &Config) { + // Only use a custom transport if a proxy is configured, right now libgit2 + // doesn't support proxies and we have to use a custom transport in this + // case. The custom transport, however, is not as well battle-tested. + match cargo::ops::http_proxy_exists(config) { + Ok(true) => {} + _ => return, + } + + let handle = match cargo::ops::http_handle(config) { + Ok(handle) => handle, + Err(..) => return, + }; + + // The unsafety of the registration function derives from two aspects: + // + // 1. This call must be synchronized with all other registration calls as + // well as construction of new transports. + // 2. The argument is leaked. 
+ // + // We're clear on point (1) because this is only called at the start of this + // binary (we know what the state of the world looks like) and we're mostly + // clear on point (2) because we'd only free it after everything is done + // anyway + unsafe { + git2_curl::register(handle); + } +} diff --git a/collector/compile-benchmarks/cargo/src/bin/check.rs b/collector/compile-benchmarks/cargo/src/bin/check.rs new file mode 100644 index 000000000..982204130 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/check.rs @@ -0,0 +1,132 @@ +use std::env; + +use cargo::core::Workspace; +use cargo::ops::{self, CompileOptions, MessageFormat, Packages}; +use cargo::util::{CliResult, Config}; +use cargo::util::important_paths::find_root_manifest_for_wd; + +pub const USAGE: &'static str = " +Check a local package and all of its dependencies for errors + +Usage: + cargo check [options] + +Options: + -h, --help Print this message + -p SPEC, --package SPEC ... Package(s) to check + --all Check all packages in the workspace + --exclude SPEC ... Exclude packages from the check + -j N, --jobs N Number of parallel jobs, defaults to # of CPUs + --lib Check only this package's library + --bin NAME Check only the specified binary + --bins Check all binaries + --example NAME Check only the specified example + --examples Check all examples + --test NAME Check only the specified test target + --tests Check all tests + --bench NAME Check only the specified bench target + --benches Check all benches + --all-targets Check all targets (lib and bin targets by default) + --release Check artifacts in release mode, with optimizations + --features FEATURES Space-separated list of features to also check + --all-features Check all available features + --no-default-features Do not check the `default` feature + --target TRIPLE Check for the target triple + --manifest-path PATH Path to the manifest to compile + -v, --verbose ... Use verbose output + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --message-format FMT Error format: human, json [default: human] + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo + +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be built. If it is not given, then the +current package is built. For more information on SPEC and its format, see the +`cargo help pkgid` command. + +All packages in the workspace are checked if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +Compilation can be configured via the use of profiles which are configured in +the manifest. The default profile for this command is `dev`, but passing +the --release flag will use the `release` profile instead. 
+"; + +#[derive(Deserialize)] +pub struct Options { + flag_package: Vec, + flag_jobs: Option, + flag_features: Vec, + flag_all_features: bool, + flag_no_default_features: bool, + flag_target: Option, + flag_manifest_path: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_message_format: MessageFormat, + flag_release: bool, + flag_lib: bool, + flag_bin: Vec, + flag_bins: bool, + flag_example: Vec, + flag_examples: bool, + flag_test: Vec, + flag_tests: bool, + flag_bench: Vec, + flag_benches: bool, + flag_all_targets: bool, + flag_locked: bool, + flag_frozen: bool, + flag_all: bool, + flag_exclude: Vec, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + debug!("executing; cmd=cargo-check; args={:?}", + env::args().collect::>()); + + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + let ws = Workspace::new(&root, config)?; + + let spec = Packages::from_flags(ws.is_virtual(), + options.flag_all, + &options.flag_exclude, + &options.flag_package)?; + + let opts = CompileOptions { + config: config, + jobs: options.flag_jobs, + target: options.flag_target.as_ref().map(|t| &t[..]), + features: &options.flag_features, + all_features: options.flag_all_features, + no_default_features: options.flag_no_default_features, + spec: spec, + mode: ops::CompileMode::Check, + release: options.flag_release, + filter: ops::CompileFilter::new(options.flag_lib, + &options.flag_bin, options.flag_bins, + &options.flag_test, options.flag_tests, + &options.flag_example, options.flag_examples, + &options.flag_bench, options.flag_benches, + options.flag_all_targets), + message_format: options.flag_message_format, + target_rustdoc_args: None, + target_rustc_args: None, + }; + + ops::compile(&ws, &opts)?; + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/bin/clean.rs b/collector/compile-benchmarks/cargo/src/bin/clean.rs new file mode 100644 index 000000000..446b5e502 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/clean.rs @@ -0,0 +1,67 @@ +use std::env; + +use cargo::core::Workspace; +use cargo::ops; +use cargo::util::{CliResult, Config}; +use cargo::util::important_paths::{find_root_manifest_for_wd}; + +#[derive(Deserialize)] +pub struct Options { + flag_package: Vec, + flag_target: Option, + flag_manifest_path: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_release: bool, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Remove artifacts that cargo has generated in the past + +Usage: + cargo clean [options] + +Options: + -h, --help Print this message + -p SPEC, --package SPEC ... Package to clean artifacts for + --manifest-path PATH Path to the manifest to the package to clean + --target TRIPLE Target triple to clean output for (default all) + --release Whether or not to clean release artifacts + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... 
diff --git a/collector/compile-benchmarks/cargo/src/bin/clean.rs b/collector/compile-benchmarks/cargo/src/bin/clean.rs
new file mode 100644
index 000000000..446b5e502
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/clean.rs
@@ -0,0 +1,67 @@
+use std::env;
+
+use cargo::core::Workspace;
+use cargo::ops;
+use cargo::util::{CliResult, Config};
+use cargo::util::important_paths::{find_root_manifest_for_wd};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_package: Vec<String>,
+    flag_target: Option<String>,
+    flag_manifest_path: Option<String>,
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_release: bool,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Remove artifacts that cargo has generated in the past
+
+Usage:
+    cargo clean [options]
+
+Options:
+    -h, --help                   Print this message
+    -p SPEC, --package SPEC ...  Package to clean artifacts for
+    --manifest-path PATH         Path to the manifest to the package to clean
+    --target TRIPLE              Target triple to clean output for (default all)
+    --release                    Whether or not to clean release artifacts
+    -v, --verbose ...            Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet                  No output printed to stdout
+    --color WHEN                 Coloring: auto, always, never
+    --frozen                     Require Cargo.lock and cache are up to date
+    --locked                     Require Cargo.lock is up to date
+    -Z FLAG ...                  Unstable (nightly-only) flags to Cargo
+
+If the --package argument is given, then SPEC is a package id specification
+which indicates which package's artifacts should be cleaned out. If it is not
+given, then all packages' artifacts are removed. For more information on SPEC
+and its format, see the `cargo help pkgid` command.
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    debug!("executing; cmd=cargo-clean; args={:?}", env::args().collect::<Vec<_>>());
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+    let opts = ops::CleanOptions {
+        config: config,
+        spec: &options.flag_package,
+        target: options.flag_target.as_ref().map(|s| &s[..]),
+        release: options.flag_release,
+    };
+    let ws = Workspace::new(&root, config)?;
+    ops::clean(&ws, &opts)?;
+    Ok(())
+}
diff --git a/collector/compile-benchmarks/cargo/src/bin/doc.rs b/collector/compile-benchmarks/cargo/src/bin/doc.rs
new file mode 100644
index 000000000..6e1783696
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/doc.rs
@@ -0,0 +1,126 @@
+use std::env;
+
+use cargo::core::Workspace;
+use cargo::ops::{self, MessageFormat, Packages};
+use cargo::util::{CliResult, Config};
+use cargo::util::important_paths::{find_root_manifest_for_wd};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_target: Option<String>,
+    flag_features: Vec<String>,
+    flag_all_features: bool,
+    flag_jobs: Option<u32>,
+    flag_manifest_path: Option<String>,
+    flag_no_default_features: bool,
+    flag_no_deps: bool,
+    flag_open: bool,
+    flag_release: bool,
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_message_format: MessageFormat,
+    flag_package: Vec<String>,
+    flag_lib: bool,
+    flag_bin: Vec<String>,
+    flag_bins: bool,
+    flag_frozen: bool,
+    flag_locked: bool,
+    flag_all: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Build a package's documentation
+
+Usage:
+    cargo doc [options]
+
+Options:
+    -h, --help                   Print this message
+    --open                       Opens the docs in a browser after the operation
+    -p SPEC, --package SPEC ...  Package to document
+    --all                        Document all packages in the workspace
+    --no-deps                    Don't build documentation for dependencies
+    -j N, --jobs N               Number of parallel jobs, defaults to # of CPUs
+    --lib                        Document only this package's library
+    --bin NAME                   Document only the specified binary
+    --bins                       Document all binaries
+    --release                    Build artifacts in release mode, with optimizations
+    --features FEATURES          Space-separated list of features to also build
+    --all-features               Build all available features
+    --no-default-features        Do not build the `default` feature
+    --target TRIPLE              Build for the target triple
+    --manifest-path PATH         Path to the manifest to document
+    -v, --verbose ...            Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet                  No output printed to stdout
+    --color WHEN                 Coloring: auto, always, never
+    --message-format FMT         Error format: human, json [default: human]
+    --frozen                     Require Cargo.lock and cache are up to date
+    --locked                     Require Cargo.lock is up to date
+    -Z FLAG ...                  Unstable (nightly-only) flags to Cargo
+
+By default the documentation for the local package and all dependencies is
+built. The output is all placed in `target/doc` in rustdoc's usual format.
+
+All packages in the workspace are documented if the `--all` flag is supplied. The
+`--all` flag is automatically assumed for a virtual manifest.
+Note that `--exclude` has to be specified in conjunction with the `--all` flag.
+
+If the --package argument is given, then SPEC is a package id specification
+which indicates which package should be documented. If it is not given, then the
+current package is documented. For more information on SPEC and its format, see
+the `cargo help pkgid` command.
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    debug!("executing; cmd=cargo-doc; args={:?}",
+           env::args().collect::<Vec<_>>());
+
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+    let ws = Workspace::new(&root, config)?;
+
+    let spec = if options.flag_all || (ws.is_virtual() && options.flag_package.is_empty()) {
+        Packages::All
+    } else {
+        Packages::Packages(&options.flag_package)
+    };
+
+    let empty = Vec::new();
+    let doc_opts = ops::DocOptions {
+        open_result: options.flag_open,
+        compile_opts: ops::CompileOptions {
+            config: config,
+            jobs: options.flag_jobs,
+            target: options.flag_target.as_ref().map(|t| &t[..]),
+            features: &options.flag_features,
+            all_features: options.flag_all_features,
+            no_default_features: options.flag_no_default_features,
+            spec: spec,
+            filter: ops::CompileFilter::new(options.flag_lib,
+                                            &options.flag_bin, options.flag_bins,
+                                            &empty, false,
+                                            &empty, false,
+                                            &empty, false,
+                                            false),
+            message_format: options.flag_message_format,
+            release: options.flag_release,
+            mode: ops::CompileMode::Doc {
+                deps: !options.flag_no_deps,
+            },
+            target_rustc_args: None,
+            target_rustdoc_args: None,
+        },
+    };
+
+    ops::doc(&ws, &doc_opts)?;
+    Ok(())
+}
+"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + let ws = Workspace::new(&root, config)?; + ops::fetch(&ws)?; + Ok(()) +} + diff --git a/collector/compile-benchmarks/cargo/src/bin/generate_lockfile.rs b/collector/compile-benchmarks/cargo/src/bin/generate_lockfile.rs new file mode 100644 index 000000000..11cc83639 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/generate_lockfile.rs @@ -0,0 +1,50 @@ +use std::env; + +use cargo::core::Workspace; +use cargo::ops; +use cargo::util::{CliResult, Config}; +use cargo::util::important_paths::find_root_manifest_for_wd; + +#[derive(Deserialize)] +pub struct Options { + flag_manifest_path: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Generate the lockfile for a project + +Usage: + cargo generate-lockfile [options] + +Options: + -h, --help Print this message + --manifest-path PATH Path to the manifest to generate a lockfile for + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo +"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + debug!("executing; cmd=cargo-generate-lockfile; args={:?}", env::args().collect::>()); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + + let ws = Workspace::new(&root, config)?; + ops::generate_lockfile(&ws)?; + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/bin/git_checkout.rs b/collector/compile-benchmarks/cargo/src/bin/git_checkout.rs new file mode 100644 index 000000000..ec3ae7a5b --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/git_checkout.rs @@ -0,0 +1,54 @@ +use cargo::core::source::{Source, SourceId, GitReference}; +use cargo::sources::git::{GitSource}; +use cargo::util::{Config, CliResult, ToUrl}; + +#[derive(Deserialize)] +pub struct Options { + flag_url: String, + flag_reference: String, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Checkout a copy of a Git repository + +Usage: + cargo git-checkout [options] --url=URL --reference=REF + cargo git-checkout -h | --help + +Options: + -h, --help Print this message + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... 
diff --git a/collector/compile-benchmarks/cargo/src/bin/help.rs b/collector/compile-benchmarks/cargo/src/bin/help.rs
new file mode 100644
index 000000000..f7f564ee7
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/help.rs
@@ -0,0 +1,22 @@
+use cargo::util::{CliResult, CliError, Config};
+
+#[derive(Deserialize)]
+pub struct Options;
+
+pub const USAGE: &'static str = "
+Get some help with a cargo command.
+
+Usage:
+    cargo help <command>
+    cargo help -h | --help
+
+Options:
+    -h, --help          Print this message
+";
+
+pub fn execute(_: Options, _: &mut Config) -> CliResult {
+    // This is a dummy command just so that `cargo help help` works.
+    // The actual delegation of help flag to subcommands is handled by the
+    // cargo command.
+    Err(CliError::new("help command should not be executed directly".into(), 101))
+}
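Because `help.rs` is deliberately inert, the interesting work happens in the dispatcher in `src/bin/cargo.rs`, which intercepts `cargo help <cmd>` before this file would ever run. A hedged sketch of that kind of rewrite, turning `cargo help foo` into `cargo foo -h`, under the assumption that dispatch operates on a plain argv vector (the function name and shape here are illustrative, not cargo's actual API):

```rust
// Rewrite `cargo help <cmd>` into `cargo <cmd> -h` so the subcommand's own
// USAGE string is printed. `cargo help` with no argument is left alone.
fn rewrite_help(argv: Vec<String>) -> Vec<String> {
    if argv.get(1).map(|s| s.as_str()) == Some("help") {
        if let Some(cmd) = argv.get(2).cloned() {
            return vec![argv[0].clone(), cmd, "-h".to_string()];
        }
    }
    argv
}

fn main() {
    let argv = vec!["cargo".to_string(), "help".to_string(), "build".to_string()];
    assert_eq!(rewrite_help(argv), vec!["cargo", "build", "-h"]);
}
```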
diff --git a/collector/compile-benchmarks/cargo/src/bin/init.rs b/collector/compile-benchmarks/cargo/src/bin/init.rs
new file mode 100644
index 000000000..9252ddd39
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/init.rs
@@ -0,0 +1,73 @@
+use std::env;
+
+use cargo::ops;
+use cargo::util::{CliResult, Config};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_bin: bool,
+    flag_lib: bool,
+    arg_path: Option<String>,
+    flag_name: Option<String>,
+    flag_vcs: Option<String>,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Create a new cargo package in an existing directory
+
+Usage:
+    cargo init [options] [<path>]
+    cargo init -h | --help
+
+Options:
+    -h, --help          Print this message
+    --vcs VCS           Initialize a new repository for the given version
+                        control system (git, hg, pijul, or fossil) or do not
+                        initialize any version control at all (none), overriding
+                        a global configuration.
+    --bin               Use a binary (application) template
+    --lib               Use a library template [default]
+    --name NAME         Set the resulting package name
+    -v, --verbose ...   Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet         No output printed to stdout
+    --color WHEN        Coloring: auto, always, never
+    --frozen            Require Cargo.lock and cache are up to date
+    --locked            Require Cargo.lock is up to date
+    -Z FLAG ...         Unstable (nightly-only) flags to Cargo
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    debug!("executing; cmd=cargo-init; args={:?}", env::args().collect::<Vec<_>>());
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+
+    let Options { flag_bin, flag_lib, arg_path, flag_name, flag_vcs, .. } = options;
+
+    let path = &arg_path.unwrap_or_else(|| String::from("."));
+    let opts = ops::NewOptions::new(flag_vcs,
+                                    flag_bin,
+                                    flag_lib,
+                                    path,
+                                    flag_name.as_ref().map(|s| s.as_ref()));
+
+    let opts_lib = opts.lib;
+    ops::init(&opts, config)?;
+
+    config.shell().status("Created", format!("{} project",
+                                             if opts_lib { "library" }
+                                             else {"binary (application)"}))?;
+
+    Ok(())
+}
+
diff --git a/collector/compile-benchmarks/cargo/src/bin/install.rs b/collector/compile-benchmarks/cargo/src/bin/install.rs
new file mode 100644
index 000000000..c7062d40c
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/install.rs
@@ -0,0 +1,163 @@
+use cargo::ops;
+use cargo::core::{SourceId, GitReference};
+use cargo::util::{CliResult, Config, ToUrl};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_jobs: Option<u32>,
+    flag_features: Vec<String>,
+    flag_all_features: bool,
+    flag_no_default_features: bool,
+    flag_debug: bool,
+    flag_bin: Vec<String>,
+    flag_bins: bool,
+    flag_example: Vec<String>,
+    flag_examples: bool,
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_root: Option<String>,
+    flag_list: bool,
+    flag_force: bool,
+    flag_frozen: bool,
+    flag_locked: bool,
+
+    arg_crate: Vec<String>,
+    flag_vers: Option<String>,
+
+    flag_git: Option<String>,
+    flag_branch: Option<String>,
+    flag_tag: Option<String>,
+    flag_rev: Option<String>,
+
+    flag_path: Option<String>,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Install a Rust binary
+
+Usage:
+    cargo install [options] [<crate>...]
+    cargo install [options] --list
+
+Specifying what crate to install:
+    --vers VERS               Specify a version to install from crates.io
+    --git URL                 Git URL to install the specified crate from
+    --branch BRANCH           Branch to use when installing from git
+    --tag TAG                 Tag to use when installing from git
+    --rev SHA                 Specific commit to use when installing from git
+    --path PATH               Filesystem path to local crate to install
+
+Build and install options:
+    -h, --help                Print this message
+    -j N, --jobs N            Number of parallel jobs, defaults to # of CPUs
+    -f, --force               Force overwriting existing crates or binaries
+    --features FEATURES       Space-separated list of features to activate
+    --all-features            Build all available features
+    --no-default-features     Do not build the `default` feature
+    --debug                   Build in debug mode instead of release mode
+    --bin NAME                Install only the specified binary
+    --bins                    Install all binaries
+    --example NAME            Install only the specified example
+    --examples                Install all examples
+    --root DIR                Directory to install packages into
+    -v, --verbose ...         Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet               Less output printed to stdout
+    --color WHEN              Coloring: auto, always, never
+    --frozen                  Require Cargo.lock and cache are up to date
+    --locked                  Require Cargo.lock is up to date
+    -Z FLAG ...               Unstable (nightly-only) flags to Cargo
+
+This command manages Cargo's local set of installed binary crates. Only packages
+which have [[bin]] targets can be installed, and all binaries are installed into
+the installation root's `bin` folder. The installation root is determined, in
+order of precedence, by `--root`, `$CARGO_INSTALL_ROOT`, the `install.root`
+configuration key, and finally the home directory (which is either
+`$CARGO_HOME` if set or `$HOME/.cargo` by default).
+
+There are multiple sources from which a crate can be installed. The default
+location is crates.io but the `--git` and `--path` flags can change this source.
+If the source contains more than one package (such as crates.io or a git
+repository with multiple crates) the `<crate>` argument is required to indicate
+which crate should be installed.
+
+Crates from crates.io can optionally specify the version they wish to install
+via the `--vers` flags, and similarly packages from git repositories can
+optionally specify the branch, tag, or revision that should be installed. If a
+crate has multiple binaries, the `--bin` argument can selectively install only
+one of them, and if you'd rather install examples the `--example` argument can
+be used as well.
+
+By default cargo will refuse to overwrite existing binaries. The `--force` flag
+enables overwriting existing binaries. Thus you can reinstall a crate with
+`cargo install --force <crate>`.
+
+As a special convenience, omitting the <crate> specification entirely will
+install the crate in the current directory. That is, `install` is equivalent to
+the more explicit `install --path .`.
+
+The `--list` option will list all installed packages (and their versions).
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+
+    let compile_opts = ops::CompileOptions {
+        config: config,
+        jobs: options.flag_jobs,
+        target: None,
+        features: &options.flag_features,
+        all_features: options.flag_all_features,
+        no_default_features: options.flag_no_default_features,
+        spec: ops::Packages::Packages(&[]),
+        mode: ops::CompileMode::Build,
+        release: !options.flag_debug,
+        filter: ops::CompileFilter::new(false,
+                                        &options.flag_bin, options.flag_bins,
+                                        &[], false,
+                                        &options.flag_example, options.flag_examples,
+                                        &[], false,
+                                        false),
+        message_format: ops::MessageFormat::Human,
+        target_rustc_args: None,
+        target_rustdoc_args: None,
+    };
+
+    let source = if let Some(url) = options.flag_git {
+        let url = url.to_url()?;
+        let gitref = if let Some(branch) = options.flag_branch {
+            GitReference::Branch(branch)
+        } else if let Some(tag) = options.flag_tag {
+            GitReference::Tag(tag)
+        } else if let Some(rev) = options.flag_rev {
+            GitReference::Rev(rev)
+        } else {
+            GitReference::Branch("master".to_string())
+        };
+        SourceId::for_git(&url, gitref)?
+    } else if let Some(path) = options.flag_path {
+        SourceId::for_path(&config.cwd().join(path))?
+    } else if options.arg_crate.is_empty() {
+        SourceId::for_path(config.cwd())?
+    } else {
+        SourceId::crates_io(config)?
+    };
+
+    let krates = options.arg_crate.iter().map(|s| &s[..]).collect::<Vec<_>>();
+    let vers = options.flag_vers.as_ref().map(|s| &s[..]);
+    let root = options.flag_root.as_ref().map(|s| &s[..]);
+
+    if options.flag_list {
+        ops::install_list(root, config)?;
+    } else {
+        ops::install(root, krates, &source, vers, &compile_opts, options.flag_force)?;
+    }
+    Ok(())
+}
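The installation-root precedence described in the help text reads naturally as a chain of fallbacks. A minimal sketch, assuming plain environment lookups in place of cargo's `Config` machinery (the function and parameter names here are hypothetical):

```rust
use std::env;
use std::path::PathBuf;

// --root beats $CARGO_INSTALL_ROOT, which beats the `install.root` config
// key, which beats the cargo home directory ($CARGO_HOME, else $HOME/.cargo).
// Binaries then land in <root>/bin.
fn install_root(cli_root: Option<PathBuf>, config_root: Option<PathBuf>) -> PathBuf {
    cli_root
        .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from))
        .or(config_root)
        .or_else(|| env::var_os("CARGO_HOME").map(PathBuf::from))
        .unwrap_or_else(|| {
            PathBuf::from(env::var_os("HOME").unwrap_or_default()).join(".cargo")
        })
}

fn main() {
    println!("binaries go to {}", install_root(None, None).join("bin").display());
}
```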
diff --git a/collector/compile-benchmarks/cargo/src/bin/locate_project.rs b/collector/compile-benchmarks/cargo/src/bin/locate_project.rs
new file mode 100644
index 000000000..6e16cca2d
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/locate_project.rs
@@ -0,0 +1,38 @@
+use cargo;
+use cargo::util::{CliResult, CliError, Config};
+use cargo::util::important_paths::{find_root_manifest_for_wd};
+
+#[derive(Deserialize)]
+pub struct LocateProjectFlags {
+    flag_manifest_path: Option<String>,
+}
+
+pub const USAGE: &'static str = "
+Print a JSON representation of a Cargo.toml file's location
+
+Usage:
+    cargo locate-project [options]
+
+Options:
+    --manifest-path PATH     Path to the manifest to locate
+    -h, --help               Print this message
+";
+
+#[derive(Serialize)]
+pub struct ProjectLocation {
+    root: String
+}
+
+pub fn execute(flags: LocateProjectFlags, config: &mut Config) -> CliResult {
+    let root = find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd())?;
+
+    let string = root.to_str()
+                     .ok_or_else(|| "Your project path contains \
+                                     characters not representable in \
+                                     Unicode".into())
+                     .map_err(|e| CliError::new(e, 1))?;
+
+    let location = ProjectLocation { root: string.to_string() };
+    cargo::print_json(&location);
+    Ok(())
+}
diff --git a/collector/compile-benchmarks/cargo/src/bin/login.rs b/collector/compile-benchmarks/cargo/src/bin/login.rs
new file mode 100644
index 000000000..99ce8e755
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/login.rs
@@ -0,0 +1,69 @@
+use std::io::prelude::*;
+use std::io;
+
+use cargo::ops;
+use cargo::core::{SourceId, Source};
+use cargo::sources::RegistrySource;
+use cargo::util::{CliResult, CargoResultExt, Config};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_host: Option<String>,
+    arg_token: Option<String>,
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Save an api token from the registry locally
+
+Usage:
+    cargo login [options] [<token>]
+
+Options:
+    -h, --help               Print this message
+    --host HOST              Host to set the token for
+    -v, --verbose ...        Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet              No output printed to stdout
+    --color WHEN             Coloring: auto, always, never
+    --frozen                 Require Cargo.lock and cache are up to date
+    --locked                 Require Cargo.lock is up to date
+    -Z FLAG ...              Unstable (nightly-only) flags to Cargo
+
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+    let token = match options.arg_token.clone() {
+        Some(token) => token,
+        None => {
+            let src = SourceId::crates_io(config)?;
+            let mut src = RegistrySource::remote(&src, config);
+            src.update()?;
+            let config = src.config()?.unwrap();
+            let host = options.flag_host.clone().unwrap_or(config.api);
+            println!("please visit {}me and paste the API Token below", host);
+            let mut line = String::new();
+            let input = io::stdin();
+            input.lock().read_line(&mut line).chain_err(|| {
+                "failed to read stdin"
+            })?;
+            line
+        }
+    };
+
+    let token = token.trim().to_string();
+    ops::registry_login(config, token)?;
+    Ok(())
+}
+
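The token-or-prompt pattern in `login.rs` — use the CLI argument when present, otherwise prompt and read one trimmed line from stdin — is worth seeing standalone. A small sketch with `io::Result` in place of cargo's error-chain types:

```rust
use std::io::{self, BufRead, Write};

// Prefer an explicitly supplied token; fall back to an interactive prompt.
// Trimming handles both trailing newlines and accidental whitespace on paste.
fn token_from(arg: Option<String>) -> io::Result<String> {
    match arg {
        Some(t) => Ok(t.trim().to_string()),
        None => {
            print!("please paste the API token below: ");
            io::stdout().flush()?;
            let mut line = String::new();
            io::stdin().lock().read_line(&mut line)?;
            Ok(line.trim().to_string())
        }
    }
}

fn main() {
    let tok = token_from(Some("  abc123  ".to_string())).unwrap();
    assert_eq!(tok, "abc123");
}
```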
diff --git a/collector/compile-benchmarks/cargo/src/bin/metadata.rs b/collector/compile-benchmarks/cargo/src/bin/metadata.rs
new file mode 100644
index 000000000..d10fe8c44
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/metadata.rs
@@ -0,0 +1,75 @@
+use cargo;
+use cargo::core::Workspace;
+use cargo::ops::{output_metadata, OutputMetadataOptions};
+use cargo::util::important_paths::find_root_manifest_for_wd;
+use cargo::util::{CliResult, Config};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_color: Option<String>,
+    flag_features: Vec<String>,
+    flag_all_features: bool,
+    flag_format_version: Option<u32>,
+    flag_manifest_path: Option<String>,
+    flag_no_default_features: bool,
+    flag_no_deps: bool,
+    flag_quiet: Option<bool>,
+    flag_verbose: u32,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Output the resolved dependencies of a project, the concrete used versions
+including overrides, in machine-readable format.
+
+Usage:
+    cargo metadata [options]
+
+Options:
+    -h, --help                 Print this message
+    --features FEATURES        Space-separated list of features
+    --all-features             Build all available features
+    --no-default-features      Do not include the `default` feature
+    --no-deps                  Output information only about the root package
+                               and don't fetch dependencies.
+    --manifest-path PATH       Path to the manifest
+    --format-version VERSION   Format version
+                               Valid values: 1
+    -v, --verbose ...          Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet                No output printed to stdout
+    --color WHEN               Coloring: auto, always, never
+    --frozen                   Require Cargo.lock and cache are up to date
+    --locked                   Require Cargo.lock is up to date
+    -Z FLAG ...                Unstable (nightly-only) flags to Cargo
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+    let manifest = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+
+    if options.flag_format_version.is_none() {
+        config.shell().warn("please specify `--format-version` flag explicitly to \
+                             avoid compatibility problems")?
+    }
+
+    let options = OutputMetadataOptions {
+        features: options.flag_features,
+        all_features: options.flag_all_features,
+        no_default_features: options.flag_no_default_features,
+        no_deps: options.flag_no_deps,
+        version: options.flag_format_version.unwrap_or(1),
+    };
+
+    let ws = Workspace::new(&manifest, config)?;
+    let result = output_metadata(&ws, &options)?;
+    cargo::print_json(&result);
+    Ok(())
+}
diff --git a/collector/compile-benchmarks/cargo/src/bin/new.rs b/collector/compile-benchmarks/cargo/src/bin/new.rs
new file mode 100644
index 000000000..c006fd1e4
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/new.rs
@@ -0,0 +1,73 @@
+use std::env;
+
+use cargo::ops;
+use cargo::util::{CliResult, Config};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_bin: bool,
+    flag_lib: bool,
+    arg_path: String,
+    flag_name: Option<String>,
+    flag_vcs: Option<String>,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Create a new cargo package at <path>
+
+Usage:
+    cargo new [options] <path>
+    cargo new -h | --help
+
+Options:
+    -h, --help          Print this message
+    --vcs VCS           Initialize a new repository for the given version
+                        control system (git, hg, pijul, or fossil) or do not
+                        initialize any version control at all (none), overriding
+                        a global configuration.
+    --bin               Use a binary (application) template
+    --lib               Use a library template [default]
+    --name NAME         Set the resulting package name, defaults to the value of <path>
+    -v, --verbose ...   Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet         No output printed to stdout
+    --color WHEN        Coloring: auto, always, never
+    --frozen            Require Cargo.lock and cache are up to date
+    --locked            Require Cargo.lock is up to date
+    -Z FLAG ...         Unstable (nightly-only) flags to Cargo
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    debug!("executing; cmd=cargo-new; args={:?}", env::args().collect::<Vec<_>>());
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+
+    let Options { flag_bin, flag_lib, arg_path, flag_name, flag_vcs, .. } = options;
+
+    let opts = ops::NewOptions::new(flag_vcs,
+                                    flag_bin,
+                                    flag_lib,
+                                    &arg_path,
+                                    flag_name.as_ref().map(|s| s.as_ref()));
+
+    let opts_lib = opts.lib;
+    ops::new(&opts, config)?;
+
+    config.shell().status("Created", format!("{} `{}` project",
+                                             if opts_lib { "library" }
+                                             else {"binary (application)"},
+                                             arg_path))?;
+
+    Ok(())
+}
+
+"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + let opts = ops::OwnersOptions { + krate: options.arg_crate, + token: options.flag_token, + index: options.flag_index, + to_add: options.flag_add, + to_remove: options.flag_remove, + list: options.flag_list, + }; + ops::modify_owners(config, &opts)?; + Ok(()) +} + diff --git a/collector/compile-benchmarks/cargo/src/bin/package.rs b/collector/compile-benchmarks/cargo/src/bin/package.rs new file mode 100644 index 000000000..31e3330ad --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/package.rs @@ -0,0 +1,66 @@ +use cargo::core::Workspace; +use cargo::ops; +use cargo::util::{CliResult, Config}; +use cargo::util::important_paths::find_root_manifest_for_wd; + +#[derive(Deserialize)] +pub struct Options { + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_target: Option, + flag_manifest_path: Option, + flag_no_verify: bool, + flag_no_metadata: bool, + flag_list: bool, + flag_allow_dirty: bool, + flag_jobs: Option, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Assemble the local package into a distributable tarball + +Usage: + cargo package [options] + +Options: + -h, --help Print this message + -l, --list Print files included in a package without making one + --no-verify Don't verify the contents by building them + --no-metadata Ignore warnings about a lack of human-usable metadata + --allow-dirty Allow dirty working directories to be packaged + --target TRIPLE Build for the target triple + --manifest-path PATH Path to the manifest to compile + -j N, --jobs N Number of parallel jobs, defaults to # of CPUs + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... 
diff --git a/collector/compile-benchmarks/cargo/src/bin/package.rs b/collector/compile-benchmarks/cargo/src/bin/package.rs
new file mode 100644
index 000000000..31e3330ad
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/package.rs
@@ -0,0 +1,66 @@
+use cargo::core::Workspace;
+use cargo::ops;
+use cargo::util::{CliResult, Config};
+use cargo::util::important_paths::find_root_manifest_for_wd;
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_target: Option<String>,
+    flag_manifest_path: Option<String>,
+    flag_no_verify: bool,
+    flag_no_metadata: bool,
+    flag_list: bool,
+    flag_allow_dirty: bool,
+    flag_jobs: Option<u32>,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Assemble the local package into a distributable tarball
+
+Usage:
+    cargo package [options]
+
+Options:
+    -h, --help               Print this message
+    -l, --list               Print files included in a package without making one
+    --no-verify              Don't verify the contents by building them
+    --no-metadata            Ignore warnings about a lack of human-usable metadata
+    --allow-dirty            Allow dirty working directories to be packaged
+    --target TRIPLE          Build for the target triple
+    --manifest-path PATH     Path to the manifest to compile
+    -j N, --jobs N           Number of parallel jobs, defaults to # of CPUs
+    -v, --verbose ...        Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet              No output printed to stdout
+    --color WHEN             Coloring: auto, always, never
+    --frozen                 Require Cargo.lock and cache are up to date
+    --locked                 Require Cargo.lock is up to date
+    -Z FLAG ...              Unstable (nightly-only) flags to Cargo
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+    let ws = Workspace::new(&root, config)?;
+    ops::package(&ws, &ops::PackageOpts {
+        config: config,
+        verify: !options.flag_no_verify,
+        list: options.flag_list,
+        check_metadata: !options.flag_no_metadata,
+        allow_dirty: options.flag_allow_dirty,
+        target: options.flag_target.as_ref().map(|t| &t[..]),
+        jobs: options.flag_jobs,
+    })?;
+    Ok(())
+}
diff --git a/collector/compile-benchmarks/cargo/src/bin/pkgid.rs b/collector/compile-benchmarks/cargo/src/bin/pkgid.rs
new file mode 100644
index 000000000..01f8a8f6b
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/pkgid.rs
@@ -0,0 +1,80 @@
+use cargo::core::Workspace;
+use cargo::ops;
+use cargo::util::{CliResult, Config};
+use cargo::util::important_paths::{find_root_manifest_for_wd};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_manifest_path: Option<String>,
+    flag_frozen: bool,
+    flag_locked: bool,
+    flag_package: Option<String>,
+    arg_spec: Option<String>,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Print a fully qualified package specification
+
+Usage:
+    cargo pkgid [options] [<spec>]
+
+Options:
+    -h, --help               Print this message
+    -p SPEC, --package SPEC  Argument to get the package id specifier for
+    --manifest-path PATH     Path to the manifest to the package to clean
+    -v, --verbose ...        Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet              No output printed to stdout
+    --color WHEN             Coloring: auto, always, never
+    --frozen                 Require Cargo.lock and cache are up to date
+    --locked                 Require Cargo.lock is up to date
+    -Z FLAG ...              Unstable (nightly-only) flags to Cargo
+
+Given a <spec> argument, print out the fully qualified package id specifier.
+This command will generate an error if <spec> is ambiguous as to which package
+it refers to in the dependency graph. If no <spec> is given, then the pkgid for
+the local package is printed.
+
+This command requires that a lockfile is available and dependencies have been
+fetched.
+
+Example Package IDs
+
+           pkgid                  |  name  |  version  |          url
+    |-----------------------------|--------|-----------|---------------------|
+     foo                          | foo    | *         | *
+     foo:1.2.3                    | foo    | 1.2.3     | *
+     crates.io/foo                | foo    | *         | *://crates.io/foo
+     crates.io/foo#1.2.3          | foo    | 1.2.3     | *://crates.io/foo
+     crates.io/bar#foo:1.2.3      | foo    | 1.2.3     | *://crates.io/bar
+     http://crates.io/foo#1.2.3   | foo    | 1.2.3     | http://crates.io/foo
+
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+    let root = find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd())?;
+    let ws = Workspace::new(&root, config)?;
+
+    let spec = if options.arg_spec.is_some() {
+        options.arg_spec
+    } else if options.flag_package.is_some() {
+        options.flag_package
+    } else {
+        None
+    };
+    let spec = spec.as_ref().map(|s| &s[..]);
+    let spec = ops::pkgid(&ws, spec)?;
+    println!("{}", spec);
+    Ok(())
+}
+
diff --git a/collector/compile-benchmarks/cargo/src/bin/publish.rs b/collector/compile-benchmarks/cargo/src/bin/publish.rs
new file mode 100644
index 000000000..c34a0e270
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/publish.rs
@@ -0,0 +1,105 @@
+use cargo::core::Workspace;
+use cargo::ops;
+use cargo::util::{CliResult, Config};
+use cargo::util::important_paths::find_root_manifest_for_wd;
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_index: Option<String>,
+    flag_host: Option<String>, // TODO: Deprecated, remove
+    flag_token: Option<String>,
+    flag_target: Option<String>,
+    flag_manifest_path: Option<String>,
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_no_verify: bool,
+    flag_allow_dirty: bool,
+    flag_jobs: Option<u32>,
+    flag_dry_run: bool,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Upload a package to the registry
+
+Usage:
+    cargo publish [options]
+
+Options:
+    -h, --help               Print this message
+    --index INDEX            Registry index to upload the package to
+    --host HOST              DEPRECATED, renamed to '--index'
+    --token TOKEN            Token to use when uploading
+    --no-verify              Don't verify package tarball before publish
+    --allow-dirty            Allow publishing with a dirty source directory
+    --target TRIPLE          Build for the target triple
+    --manifest-path PATH     Path to the manifest of the package to publish
+    -j N, --jobs N           Number of parallel jobs, defaults to # of CPUs
+    --dry-run                Perform all checks without uploading
+    -v, --verbose ...        Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet              No output printed to stdout
+    --color WHEN             Coloring: auto, always, never
+    --frozen                 Require Cargo.lock and cache are up to date
+    --locked                 Require Cargo.lock is up to date
+    -Z FLAG ...              Unstable (nightly-only) flags to Cargo
+
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    config.configure(options.flag_verbose,
+                     options.flag_quiet,
+                     &options.flag_color,
+                     options.flag_frozen,
+                     options.flag_locked,
+                     &options.flag_z)?;
+
+    let Options {
+        flag_token: token,
+        flag_index: index,
+        flag_host: host, // TODO: Deprecated, remove
+        flag_manifest_path,
+        flag_no_verify: no_verify,
+        flag_allow_dirty: allow_dirty,
+        flag_jobs: jobs,
+        flag_dry_run: dry_run,
+        flag_target: target,
+        ..
+    } = options;
+
+    // TODO: Deprecated
+    // remove once it has been decided --host can be removed
+    // We may instead want to repurpose the host flag, as
+    // mentioned in this issue
+    // https://github.com/rust-lang/cargo/issues/4208
+    let msg = "The flag '--host' is no longer valid.
+
+Previous versions of Cargo accepted this flag, but it is being
+deprecated. The flag is being renamed to 'index', as the flag
+wants the location of the index to which to publish. Please
+use '--index' instead.
+
+This will soon become a hard error, so it's either recommended
+to update to a fixed version or contact the upstream maintainer
+about this warning.";
+
+    let root = find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd())?;
+    let ws = Workspace::new(&root, config)?;
+    ops::publish(&ws, &ops::PublishOpts {
+        config: config,
+        token: token,
+        index:
+            if host.clone().is_none() || host.clone().unwrap().is_empty() { index }
+            else { config.shell().warn(&msg)?; host }, // TODO: Deprecated, remove
+        verify: !no_verify,
+        allow_dirty: allow_dirty,
+        target: target.as_ref().map(|t| &t[..]),
+        jobs: jobs,
+        dry_run: dry_run,
+    })?;
+    Ok(())
+}
diff --git a/collector/compile-benchmarks/cargo/src/bin/read_manifest.rs b/collector/compile-benchmarks/cargo/src/bin/read_manifest.rs
new file mode 100644
index 000000000..eee2210d8
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/read_manifest.rs
@@ -0,0 +1,39 @@
+use std::env;
+
+use cargo;
+use cargo::core::Package;
+use cargo::util::{CliResult, Config};
+use cargo::util::important_paths::{find_root_manifest_for_wd};
+
+#[derive(Deserialize)]
+pub struct Options {
+    flag_manifest_path: Option<String>,
+    flag_color: Option<String>,
+}
+
+pub const USAGE: &'static str = "
+Deprecated, use `cargo metadata --no-deps` instead.
+Print a JSON representation of a Cargo.toml manifest.
+
+Usage:
+    cargo read-manifest [options]
+    cargo read-manifest -h | --help
+
+Options:
+    -h, --help               Print this message
+    -v, --verbose ...        Use verbose output (-vv very verbose/build.rs output)
+    --manifest-path PATH     Path to the manifest
+    --color WHEN             Coloring: auto, always, never
+";
+
+pub fn execute(options: Options, config: &mut Config) -> CliResult {
+    debug!("executing; cmd=cargo-read-manifest; args={:?}",
+           env::args().collect::<Vec<_>>());
+    config.shell().set_color_choice(options.flag_color.as_ref().map(|s| &s[..]))?;
+
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+
+    let pkg = Package::for_path(&root, config)?;
+    cargo::print_json(&pkg);
+    Ok(())
+}
+"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + + let (mut examples, mut bins) = (Vec::new(), Vec::new()); + if let Some(s) = options.flag_bin { + bins.push(s); + } + if let Some(s) = options.flag_example { + examples.push(s); + } + + let packages = Vec::from_iter(options.flag_package.iter().cloned()); + let spec = Packages::Packages(&packages); + + let compile_opts = ops::CompileOptions { + config: config, + jobs: options.flag_jobs, + target: options.flag_target.as_ref().map(|t| &t[..]), + features: &options.flag_features, + all_features: options.flag_all_features, + no_default_features: options.flag_no_default_features, + spec: spec, + release: options.flag_release, + mode: ops::CompileMode::Build, + filter: if examples.is_empty() && bins.is_empty() { + ops::CompileFilter::Default { required_features_filterable: false, } + } else { + ops::CompileFilter::new(false, + &bins, false, + &[], false, + &examples, false, + &[], false, + false) + }, + message_format: options.flag_message_format, + target_rustdoc_args: None, + target_rustc_args: None, + }; + + let ws = Workspace::new(&root, config)?; + match ops::run(&ws, &compile_opts, &options.arg_args)? { + None => Ok(()), + Some(err) => { + // If we never actually spawned the process then that sounds pretty + // bad and we always want to forward that up. + let exit = match err.exit { + Some(exit) => exit, + None => return Err( + CliError::new(CargoErrorKind::ProcessErrorKind(err).into(), 101)), + }; + + // If `-q` was passed then we suppress extra error information about + // a failed process, we assume the process itself printed out enough + // information about why it failed so we don't do so as well + let exit_code = exit.code().unwrap_or(101); + Err(if options.flag_quiet == Some(true) { + CliError::code(exit_code) + } else { + CliError::new(CargoErrorKind::ProcessErrorKind(err).into(), exit_code) + }) + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/bin/rustc.rs b/collector/compile-benchmarks/cargo/src/bin/rustc.rs new file mode 100644 index 000000000..e6f5dc540 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/rustc.rs @@ -0,0 +1,140 @@ +use std::env; + +use cargo::core::Workspace; +use cargo::ops::{self, CompileOptions, CompileMode, MessageFormat, Packages}; +use cargo::util::important_paths::{find_root_manifest_for_wd}; +use cargo::util::{CliResult, CliError, Config}; + +#[derive(Deserialize)] +pub struct Options { + arg_opts: Option>, + flag_package: Option, + flag_jobs: Option, + flag_features: Vec, + flag_all_features: bool, + flag_no_default_features: bool, + flag_target: Option, + flag_manifest_path: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_message_format: MessageFormat, + flag_release: bool, + flag_lib: bool, + flag_bin: Vec, + flag_bins: bool, + flag_example: Vec, + flag_examples: bool, + flag_test: Vec, + flag_tests: bool, + flag_bench: Vec, + flag_benches: bool, + flag_all_targets: bool, + flag_profile: Option, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Compile a package and all of its dependencies + +Usage: + cargo rustc [options] [--] [...] 
diff --git a/collector/compile-benchmarks/cargo/src/bin/rustc.rs b/collector/compile-benchmarks/cargo/src/bin/rustc.rs
new file mode 100644
index 000000000..e6f5dc540
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/rustc.rs
@@ -0,0 +1,140 @@
+use std::env;
+
+use cargo::core::Workspace;
+use cargo::ops::{self, CompileOptions, CompileMode, MessageFormat, Packages};
+use cargo::util::important_paths::{find_root_manifest_for_wd};
+use cargo::util::{CliResult, CliError, Config};
+
+#[derive(Deserialize)]
+pub struct Options {
+    arg_opts: Option<Vec<String>>,
+    flag_package: Option<String>,
+    flag_jobs: Option<u32>,
+    flag_features: Vec<String>,
+    flag_all_features: bool,
+    flag_no_default_features: bool,
+    flag_target: Option<String>,
+    flag_manifest_path: Option<String>,
+    flag_verbose: u32,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_message_format: MessageFormat,
+    flag_release: bool,
+    flag_lib: bool,
+    flag_bin: Vec<String>,
+    flag_bins: bool,
+    flag_example: Vec<String>,
+    flag_examples: bool,
+    flag_test: Vec<String>,
+    flag_tests: bool,
+    flag_bench: Vec<String>,
+    flag_benches: bool,
+    flag_all_targets: bool,
+    flag_profile: Option<String>,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Compile a package and all of its dependencies
+
+Usage:
+    cargo rustc [options] [--] [<opts>...]
+
+Options:
+    -h, --help               Print this message
+    -p SPEC, --package SPEC  Package to build
+    -j N, --jobs N           Number of parallel jobs, defaults to # of CPUs
+    --lib                    Build only this package's library
+    --bin NAME               Build only the specified binary
+    --bins                   Build all binaries
+    --example NAME           Build only the specified example
+    --examples               Build all examples
+    --test NAME              Build only the specified test target
+    --tests                  Build all tests
+    --bench NAME             Build only the specified bench target
+    --benches                Build all benches
+    --all-targets            Build all targets (lib and bin targets by default)
+    --release                Build artifacts in release mode, with optimizations
+    --profile PROFILE        Profile to build the selected target for
+    --features FEATURES      Features to compile for the package
+    --all-features           Build all available features
+    --no-default-features    Do not compile default features for the package
+    --target TRIPLE          Target triple which compiles will be for
+    --manifest-path PATH     Path to the manifest to fetch dependencies for
+    -v, --verbose ...        Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet              No output printed to stdout
+    --color WHEN             Coloring: auto, always, never
+    --message-format FMT     Error format: human, json [default: human]
+    --frozen                 Require Cargo.lock and cache are up to date
+    --locked                 Require Cargo.lock is up to date
+    -Z FLAG ...              Unstable (nightly-only) flags to Cargo
+
+The specified target for the current package (or package specified by SPEC if
+provided) will be compiled along with all of its dependencies. The specified
+<opts>... will all be passed to the final compiler invocation, not any of the
+dependencies. Note that the compiler will still unconditionally receive
+arguments such as -L, --extern, and --crate-type, and the specified <opts>...
+will simply be added to the compiler invocation.
+
+This command requires that only one target is being compiled. If more than one
+target is available for the current package the filters of --lib, --bin, etc,
+must be used to select which target is compiled. To pass flags to all compiler
+processes spawned by Cargo, use the $RUSTFLAGS environment variable or the
+`build.rustflags` configuration option.
+"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + debug!("executing; cmd=cargo-rustc; args={:?}", + env::args().collect::>()); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + + let root = find_root_manifest_for_wd(options.flag_manifest_path, + config.cwd())?; + let mode = match options.flag_profile.as_ref().map(|t| &t[..]) { + Some("dev") | None => CompileMode::Build, + Some("test") => CompileMode::Test, + Some("bench") => CompileMode::Bench, + Some("check") => CompileMode::Check, + Some(mode) => { + let err = format!("unknown profile: `{}`, use dev, + test, or bench", mode).into(); + return Err(CliError::new(err, 101)) + } + }; + + let spec = options.flag_package.map_or_else(Vec::new, |s| vec![s]); + + let opts = CompileOptions { + config: config, + jobs: options.flag_jobs, + target: options.flag_target.as_ref().map(|t| &t[..]), + features: &options.flag_features, + all_features: options.flag_all_features, + no_default_features: options.flag_no_default_features, + spec: Packages::Packages(&spec), + mode: mode, + release: options.flag_release, + filter: ops::CompileFilter::new(options.flag_lib, + &options.flag_bin, options.flag_bins, + &options.flag_test, options.flag_tests, + &options.flag_example, options.flag_examples, + &options.flag_bench, options.flag_benches, + options.flag_all_targets), + message_format: options.flag_message_format, + target_rustdoc_args: None, + target_rustc_args: options.arg_opts.as_ref().map(|a| &a[..]), + }; + + let ws = Workspace::new(&root, config)?; + ops::compile(&ws, &opts)?; + Ok(()) +} + diff --git a/collector/compile-benchmarks/cargo/src/bin/rustdoc.rs b/collector/compile-benchmarks/cargo/src/bin/rustdoc.rs new file mode 100644 index 000000000..156a6b867 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/rustdoc.rs @@ -0,0 +1,127 @@ +use cargo::core::Workspace; +use cargo::ops::{self, MessageFormat, Packages}; +use cargo::util::{CliResult, Config}; +use cargo::util::important_paths::{find_root_manifest_for_wd}; + +#[derive(Deserialize)] +pub struct Options { + arg_opts: Vec, + flag_target: Option, + flag_features: Vec, + flag_all_features: bool, + flag_jobs: Option, + flag_manifest_path: Option, + flag_no_default_features: bool, + flag_open: bool, + flag_verbose: u32, + flag_release: bool, + flag_quiet: Option, + flag_color: Option, + flag_message_format: MessageFormat, + flag_package: Option, + flag_lib: bool, + flag_bin: Vec, + flag_bins: bool, + flag_example: Vec, + flag_examples: bool, + flag_test: Vec, + flag_tests: bool, + flag_bench: Vec, + flag_benches: bool, + flag_all_targets: bool, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Build a package's documentation, using specified custom flags. + +Usage: + cargo rustdoc [options] [--] [...] 
diff --git a/collector/compile-benchmarks/cargo/src/bin/rustdoc.rs b/collector/compile-benchmarks/cargo/src/bin/rustdoc.rs
new file mode 100644
index 000000000..156a6b867
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/bin/rustdoc.rs
@@ -0,0 +1,127 @@
+use cargo::core::Workspace;
+use cargo::ops::{self, MessageFormat, Packages};
+use cargo::util::{CliResult, Config};
+use cargo::util::important_paths::{find_root_manifest_for_wd};
+
+#[derive(Deserialize)]
+pub struct Options {
+    arg_opts: Vec<String>,
+    flag_target: Option<String>,
+    flag_features: Vec<String>,
+    flag_all_features: bool,
+    flag_jobs: Option<u32>,
+    flag_manifest_path: Option<String>,
+    flag_no_default_features: bool,
+    flag_open: bool,
+    flag_verbose: u32,
+    flag_release: bool,
+    flag_quiet: Option<bool>,
+    flag_color: Option<String>,
+    flag_message_format: MessageFormat,
+    flag_package: Option<String>,
+    flag_lib: bool,
+    flag_bin: Vec<String>,
+    flag_bins: bool,
+    flag_example: Vec<String>,
+    flag_examples: bool,
+    flag_test: Vec<String>,
+    flag_tests: bool,
+    flag_bench: Vec<String>,
+    flag_benches: bool,
+    flag_all_targets: bool,
+    flag_frozen: bool,
+    flag_locked: bool,
+    #[serde(rename = "flag_Z")]
+    flag_z: Vec<String>,
+}
+
+pub const USAGE: &'static str = "
+Build a package's documentation, using specified custom flags.
+
+Usage:
+    cargo rustdoc [options] [--] [<opts>...]
+
+Options:
+    -h, --help               Print this message
+    --open                   Opens the docs in a browser after the operation
+    -p SPEC, --package SPEC  Package to document
+    -j N, --jobs N           Number of parallel jobs, defaults to # of CPUs
+    --lib                    Build only this package's library
+    --bin NAME               Build only the specified binary
+    --bins                   Build all binaries
+    --example NAME           Build only the specified example
+    --examples               Build all examples
+    --test NAME              Build only the specified test target
+    --tests                  Build all tests
+    --bench NAME             Build only the specified bench target
+    --benches                Build all benches
+    --all-targets            Build all targets (default)
+    --release                Build artifacts in release mode, with optimizations
+    --features FEATURES      Space-separated list of features to also build
+    --all-features           Build all available features
+    --no-default-features    Do not build the `default` feature
+    --target TRIPLE          Build for the target triple
+    --manifest-path PATH     Path to the manifest to document
+    -v, --verbose ...        Use verbose output (-vv very verbose/build.rs output)
+    -q, --quiet              No output printed to stdout
+    --color WHEN             Coloring: auto, always, never
+    --message-format FMT     Error format: human, json [default: human]
+    --frozen                 Require Cargo.lock and cache are up to date
+    --locked                 Require Cargo.lock is up to date
+    -Z FLAG ...              Unstable (nightly-only) flags to Cargo
+
+The specified target for the current package (or package specified by SPEC if
+provided) will be documented with the specified <opts>... being passed to the
+final rustdoc invocation. Dependencies will not be documented as part of this
+command. Note that rustdoc will still unconditionally receive arguments such
+as -L, --extern, and --crate-type, and the specified <opts>... will simply be
+added to the rustdoc invocation.
+
+If the --package argument is given, then SPEC is a package id specification
+which indicates which package should be documented. If it is not given, then the
+current package is documented. For more information on SPEC and its format, see
+the `cargo help pkgid` command.
+"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + + let root = find_root_manifest_for_wd(options.flag_manifest_path, + config.cwd())?; + + let spec = options.flag_package.map_or_else(Vec::new, |s| vec![s]); + + let doc_opts = ops::DocOptions { + open_result: options.flag_open, + compile_opts: ops::CompileOptions { + config: config, + jobs: options.flag_jobs, + target: options.flag_target.as_ref().map(|t| &t[..]), + features: &options.flag_features, + all_features: options.flag_all_features, + no_default_features: options.flag_no_default_features, + spec: Packages::Packages(&spec), + release: options.flag_release, + filter: ops::CompileFilter::new(options.flag_lib, + &options.flag_bin, options.flag_bins, + &options.flag_test, options.flag_tests, + &options.flag_example, options.flag_examples, + &options.flag_bench, options.flag_benches, + options.flag_all_targets), + message_format: options.flag_message_format, + mode: ops::CompileMode::Doc { deps: false }, + target_rustdoc_args: Some(&options.arg_opts), + target_rustc_args: None, + }, + }; + + let ws = Workspace::new(&root, config)?; + ops::doc(&ws, &doc_opts)?; + + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/bin/search.rs b/collector/compile-benchmarks/cargo/src/bin/search.rs new file mode 100644 index 000000000..165dea1c8 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/search.rs @@ -0,0 +1,82 @@ +use cargo::ops; +use cargo::util::{CliResult, Config}; + +use std::cmp; + +#[derive(Deserialize)] +pub struct Options { + flag_index: Option, + flag_host: Option, // TODO: Depricated, remove + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_limit: Option, + flag_frozen: bool, + flag_locked: bool, + arg_query: Vec, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Search packages in crates.io + +Usage: + cargo search [options] ... + cargo search [-h | --help] + +Options: + -h, --help Print this message + --index INDEX Registry index to search in + --host HOST DEPRECATED, renamed to '--index' + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --limit LIMIT Limit the number of results (default: 10, max: 100) + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo +"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + let Options { + flag_index: index, + flag_host: host, // TODO: Depricated, remove + flag_limit: limit, + arg_query: query, + .. + } = options; + + // TODO: Depricated + // remove once it has been decided --host can be safely removed + // We may instead want to repurpose the host flag, as + // mentioned in this issue + // https://github.com/rust-lang/cargo/issues/4208 + + let msg = "The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +depricated. The flag is being renamed to 'index', as the flag +wants the location of the index in which to search. Please +use '--index' instead. 
+ +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning."; + + let index = if host.clone().is_none() || host.clone().unwrap().is_empty() { + index + } else { + config.shell().warn(&msg)?; + host + }; + + ops::search(&query.join("+"), config, index, cmp::min(100, limit.unwrap_or(10)) as u8)?; + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/bin/test.rs b/collector/compile-benchmarks/cargo/src/bin/test.rs new file mode 100644 index 000000000..2208a18af --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/test.rs @@ -0,0 +1,186 @@ +use std::env; + +use cargo::core::Workspace; +use cargo::ops::{self, MessageFormat, Packages}; +use cargo::util::{CliResult, CliError, Config, CargoErrorKind}; +use cargo::util::important_paths::find_root_manifest_for_wd; + +#[derive(Deserialize)] +pub struct Options { + arg_args: Vec, + flag_features: Vec, + flag_all_features: bool, + flag_jobs: Option, + flag_manifest_path: Option, + flag_no_default_features: bool, + flag_no_run: bool, + flag_package: Vec, + flag_target: Option, + flag_lib: bool, + flag_doc: bool, + flag_bin: Vec, + flag_bins: bool, + flag_example: Vec, + flag_examples: bool, + flag_test: Vec, + flag_tests: bool, + flag_bench: Vec, + flag_benches: bool, + flag_all_targets: bool, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_message_format: MessageFormat, + flag_release: bool, + flag_no_fail_fast: bool, + flag_frozen: bool, + flag_locked: bool, + flag_all: bool, + flag_exclude: Vec, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Execute all unit and integration tests of a local package + +Usage: + cargo test [options] [--] [...] + +Options: + -h, --help Print this message + --lib Test only this package's library + --doc Test only this library's documentation + --bin NAME ... Test only the specified binary + --bins Test all binaries + --example NAME ... Check that the specified examples compile + --examples Check that all examples compile + --test NAME ... Test only the specified test target + --tests Test all tests + --bench NAME ... Test only the specified bench target + --benches Test all benches + --all-targets Test all targets (default) + --no-run Compile, but don't run tests + -p SPEC, --package SPEC ... Package to run tests for + --all Test all packages in the workspace + --exclude SPEC ... Exclude packages from the test + -j N, --jobs N Number of parallel builds, see below for details + --release Build artifacts in release mode, with optimizations + --features FEATURES Space-separated list of features to also build + --all-features Build all available features + --no-default-features Do not build the `default` feature + --target TRIPLE Build for the target triple + --manifest-path PATH Path to the manifest to build tests for + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --message-format FMT Error format: human, json [default: human] + --no-fail-fast Run all tests regardless of failure + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo + +All of the trailing arguments are passed to the test binaries generated for +filtering tests and generally providing options configuring how they run. 
For +example, this will run all tests with the name `foo` in their name: + + cargo test foo + +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be tested. If it is not given, then the +current package is tested. For more information on SPEC and its format, see the +`cargo help pkgid` command. + +All packages in the workspace are tested if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +The --jobs argument affects the building of the test executable but does +not affect how many jobs are used when running the tests. The default value +for the --jobs argument is the number of CPUs. If you want to control the +number of simultaneous running test cases, pass the `--test-threads` option +to the test binaries: + + cargo test -- --test-threads=1 + +Compilation can be configured via the `test` profile in the manifest. + +By default the rust test harness hides output from test execution to +keep results readable. Test output can be recovered (e.g. for debugging) +by passing `--nocapture` to the test binaries: + + cargo test -- --nocapture + +To get the list of all options available for the test binaries use this: + + cargo test -- --help +"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + debug!("executing; cmd=cargo-test; args={:?}", + env::args().collect::>()); + + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + let ws = Workspace::new(&root, config)?; + + let empty = Vec::new(); + let (mode, filter); + if options.flag_doc { + mode = ops::CompileMode::Doctest; + filter = ops::CompileFilter::new(true, &empty, false, &empty, false, + &empty, false, &empty, false, + false); + } else { + mode = ops::CompileMode::Test; + filter = ops::CompileFilter::new(options.flag_lib, + &options.flag_bin, options.flag_bins, + &options.flag_test, options.flag_tests, + &options.flag_example, options.flag_examples, + &options.flag_bench, options.flag_benches, + options.flag_all_targets); + } + + let spec = Packages::from_flags(ws.is_virtual(), + options.flag_all, + &options.flag_exclude, + &options.flag_package)?; + + let ops = ops::TestOptions { + no_run: options.flag_no_run, + no_fail_fast: options.flag_no_fail_fast, + only_doc: options.flag_doc, + compile_opts: ops::CompileOptions { + config: config, + jobs: options.flag_jobs, + target: options.flag_target.as_ref().map(|s| &s[..]), + features: &options.flag_features, + all_features: options.flag_all_features, + no_default_features: options.flag_no_default_features, + spec: spec, + release: options.flag_release, + mode: mode, + filter: filter, + message_format: options.flag_message_format, + target_rustdoc_args: None, + target_rustc_args: None, + }, + }; + + let err = ops::run_tests(&ws, &ops, &options.arg_args)?; + match err { + None => Ok(()), + Some(err) => { + Err(match err.exit.as_ref().and_then(|e| e.code()) { + Some(i) => CliError::new(err.hint().into(), i), + None => CliError::new(CargoErrorKind::CargoTestErrorKind(err).into(), 101), + }) + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/bin/uninstall.rs b/collector/compile-benchmarks/cargo/src/bin/uninstall.rs new file mode 100644 index 000000000..2adf2d041 --- /dev/null +++ 
b/collector/compile-benchmarks/cargo/src/bin/uninstall.rs @@ -0,0 +1,55 @@ +use cargo::ops; +use cargo::util::{CliResult, Config}; + +#[derive(Deserialize)] +pub struct Options { + flag_bin: Vec, + flag_root: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, + + arg_spec: String, +} + +pub const USAGE: &'static str = " +Remove a Rust binary + +Usage: + cargo uninstall [options] + cargo uninstall (-h | --help) + +Options: + -h, --help Print this message + --root DIR Directory to uninstall packages from + --bin NAME Only uninstall the binary NAME + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet Less output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo + +The argument SPEC is a package id specification (see `cargo help pkgid`) to +specify which crate should be uninstalled. By default all binaries are +uninstalled for a crate but the `--bin` and `--example` flags can be used to +only uninstall particular binaries. +"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + + let root = options.flag_root.as_ref().map(|s| &s[..]); + ops::uninstall(root, &options.arg_spec, &options.flag_bin, config)?; + Ok(()) +} + diff --git a/collector/compile-benchmarks/cargo/src/bin/update.rs b/collector/compile-benchmarks/cargo/src/bin/update.rs new file mode 100644 index 000000000..1e7f92b53 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/update.rs @@ -0,0 +1,83 @@ +use std::env; + +use cargo::core::Workspace; +use cargo::ops; +use cargo::util::{CliResult, Config}; +use cargo::util::important_paths::find_root_manifest_for_wd; + +#[derive(Deserialize)] +pub struct Options { + flag_package: Vec, + flag_aggressive: bool, + flag_precise: Option, + flag_manifest_path: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Update dependencies as recorded in the local lock file. + +Usage: + cargo update [options] + +Options: + -h, --help Print this message + -p SPEC, --package SPEC ... Package to update + --aggressive Force updating all dependencies of as well + --precise PRECISE Update a single dependency to exactly PRECISE + --manifest-path PATH Path to the crate's manifest + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo + +This command requires that a `Cargo.lock` already exists as generated by +`cargo build` or related commands. + +If SPEC is given, then a conservative update of the lockfile will be +performed. This means that only the dependency specified by SPEC will be +updated. Its transitive dependencies will be updated only if SPEC cannot be +updated without updating dependencies. All other dependencies will remain +locked at their currently recorded versions. 
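+
+For example, to conservatively update only a dependency named `serde` (an
+example package name), leaving everything else at its locked version:
+
+    cargo update -p serde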
+ +If PRECISE is specified, then --aggressive must not also be specified. The +argument PRECISE is a string representing a precise revision that the package +being updated should be updated to. For example, if the package comes from a git +repository, then PRECISE would be the exact revision that the repository should +be updated to. + +If SPEC is not given, then all dependencies will be re-resolved and +updated. + +For more information about package id specifications, see `cargo help pkgid`. +"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + debug!("executing; cmd=cargo-update; args={:?}", env::args().collect::>()); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + + let update_opts = ops::UpdateOptions { + aggressive: options.flag_aggressive, + precise: options.flag_precise.as_ref().map(|s| &s[..]), + to_update: &options.flag_package, + config: config, + }; + + let ws = Workspace::new(&root, config)?; + ops::update_lockfile(&ws, &update_opts)?; + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/bin/verify_project.rs b/collector/compile-benchmarks/cargo/src/bin/verify_project.rs new file mode 100644 index 000000000..a05447026 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/verify_project.rs @@ -0,0 +1,77 @@ +use std::collections::HashMap; +use std::fs::File; +use std::io::prelude::*; +use std::process; + +use cargo; +use cargo::util::important_paths::{find_root_manifest_for_wd}; +use cargo::util::{CliResult, Config}; +use serde_json; +use toml; + +#[derive(Deserialize)] +pub struct Flags { + flag_manifest_path: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub const USAGE: &'static str = " +Check correctness of crate manifest + +Usage: + cargo verify-project [options] + cargo verify-project -h | --help + +Options: + -h, --help Print this message + --manifest-path PATH Path to the manifest to verify + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo +"; + +pub fn execute(args: Flags, config: &mut Config) -> CliResult { + config.configure(args.flag_verbose, + args.flag_quiet, + &args.flag_color, + args.flag_frozen, + args.flag_locked, + &args.flag_z)?; + + let mut contents = String::new(); + let filename = args.flag_manifest_path.unwrap_or_else(|| "Cargo.toml".into()); + let filename = match find_root_manifest_for_wd(Some(filename), config.cwd()) { + Ok(manifest_path) => manifest_path, + Err(e) => fail("invalid", &e.to_string()), + }; + + let file = File::open(&filename); + match file.and_then(|mut f| f.read_to_string(&mut contents)) { + Ok(_) => {}, + Err(e) => fail("invalid", &format!("error reading file: {}", e)) + }; + if contents.parse::().is_err() { + fail("invalid", "invalid-format"); + } + + let mut h = HashMap::new(); + h.insert("success".to_string(), "true".to_string()); + cargo::print_json(&h); + Ok(()) +} + +fn fail(reason: &str, value: &str) -> ! 
{ + let mut h = HashMap::new(); + h.insert(reason.to_string(), value.to_string()); + println!("{}", serde_json::to_string(&h).unwrap()); + process::exit(1) +} diff --git a/collector/compile-benchmarks/cargo/src/bin/version.rs b/collector/compile-benchmarks/cargo/src/bin/version.rs new file mode 100644 index 000000000..6d3772f1c --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/version.rs @@ -0,0 +1,27 @@ +use std::env; + +use cargo; +use cargo::util::{CliResult, Config}; + +#[derive(Deserialize)] +pub struct Options; + +pub const USAGE: &'static str = " +Show version information + +Usage: + cargo version [options] + +Options: + -h, --help Print this message + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + --color WHEN Coloring: auto, always, never +"; + +pub fn execute(_: Options, _: &mut Config) -> CliResult { + debug!("executing; cmd=cargo-version; args={:?}", env::args().collect::>()); + + println!("{}", cargo::version()); + + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/bin/yank.rs b/collector/compile-benchmarks/cargo/src/bin/yank.rs new file mode 100644 index 000000000..a00892a51 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/bin/yank.rs @@ -0,0 +1,63 @@ +use cargo::ops; +use cargo::util::{CliResult, Config}; + +#[derive(Deserialize)] +pub struct Options { + arg_crate: Option, + flag_token: Option, + flag_vers: Option, + flag_index: Option, + flag_verbose: u32, + flag_quiet: Option, + flag_color: Option, + flag_undo: bool, + flag_frozen: bool, + flag_locked: bool, + #[serde(rename = "flag_Z")] + flag_z: Vec, +} + +pub static USAGE: &'static str = " +Remove a pushed crate from the index + +Usage: + cargo yank [options] [] + +Options: + -h, --help Print this message + --vers VERSION The version to yank or un-yank + --undo Undo a yank, putting a version back into the index + --index INDEX Registry index to yank from + --token TOKEN API token to use when authenticating + -v, --verbose ... Use verbose output (-vv very verbose/build.rs output) + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --frozen Require Cargo.lock and cache are up to date + --locked Require Cargo.lock is up to date + -Z FLAG ... Unstable (nightly-only) flags to Cargo + +The yank command removes a previously pushed crate's version from the server's +index. This command does not delete any data, and the crate will still be +available for download via the registry's download link. + +Note that existing crates locked to a yanked version will still be able to +download the yanked version to use it. Cargo will, however, not allow any new +crates to be locked to any yanked version. 
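+
+For example, to yank version 1.0.1 of a crate named `foo` (a placeholder
+name), and then to undo that yank:
+
+    cargo yank --vers 1.0.1 foo
+    cargo yank --vers 1.0.1 --undo foo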
+"; + +pub fn execute(options: Options, config: &mut Config) -> CliResult { + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked, + &options.flag_z)?; + ops::yank(config, + options.arg_crate, + options.flag_vers, + options.flag_token, + options.flag_index, + options.flag_undo)?; + Ok(()) +} + diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/dependency.rs b/collector/compile-benchmarks/cargo/src/cargo/core/dependency.rs new file mode 100644 index 000000000..24d6c9f35 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/dependency.rs @@ -0,0 +1,369 @@ +use std::fmt; +use std::rc::Rc; +use std::str::FromStr; + +use semver::VersionReq; +use semver::ReqParseError; +use serde::ser; + +use core::{SourceId, Summary, PackageId}; +use util::{Cfg, CfgExpr, Config}; +use util::errors::{CargoResult, CargoResultExt, CargoError}; + +/// Information about a dependency requested by a Cargo manifest. +/// Cheap to copy. +#[derive(PartialEq, Clone, Debug)] +pub struct Dependency { + inner: Rc, +} + +/// The data underlying a Dependency. +#[derive(PartialEq, Clone, Debug)] +struct Inner { + name: String, + source_id: SourceId, + req: VersionReq, + specified_req: bool, + kind: Kind, + only_match_name: bool, + + optional: bool, + default_features: bool, + features: Vec, + + // This dependency should be used only for this platform. + // `None` means *all platforms*. + platform: Option, +} + +#[derive(Clone, Debug, PartialEq)] +pub enum Platform { + Name(String), + Cfg(CfgExpr), +} + +#[derive(Serialize)] +struct SerializedDependency<'a> { + name: &'a str, + source: &'a SourceId, + req: String, + kind: Kind, + + optional: bool, + uses_default_features: bool, + features: &'a [String], + target: Option<&'a Platform>, +} + +impl ser::Serialize for Dependency { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer, + { + SerializedDependency { + name: self.name(), + source: self.source_id(), + req: self.version_req().to_string(), + kind: self.kind(), + optional: self.is_optional(), + uses_default_features: self.uses_default_features(), + features: self.features(), + target: self.platform(), + }.serialize(s) + } +} + +#[derive(PartialEq, Clone, Debug, Copy)] +pub enum Kind { + Normal, + Development, + Build, +} + +fn parse_req_with_deprecated(req: &str, + extra: Option<(&PackageId, &Config)>) + -> CargoResult { + match VersionReq::parse(req) { + Err(e) => { + let (inside, config) = match extra { + Some(pair) => pair, + None => return Err(e.into()), + }; + match e { + ReqParseError::DeprecatedVersionRequirement(requirement) => { + let msg = format!("\ +parsed version requirement `{}` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of {} {}, and the correct version requirement is `{}`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. 
+", +req, inside.name(), inside.version(), requirement); + config.shell().warn(&msg)?; + + Ok(requirement) + } + e => Err(e.into()), + } + }, + Ok(v) => Ok(v), + } +} + +impl ser::Serialize for Kind { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer, + { + match *self { + Kind::Normal => None, + Kind::Development => Some("dev"), + Kind::Build => Some("build"), + }.serialize(s) + } +} + +impl Dependency { + /// Attempt to create a `Dependency` from an entry in the manifest. + pub fn parse(name: &str, + version: Option<&str>, + source_id: &SourceId, + inside: &PackageId, + config: &Config) -> CargoResult { + let arg = Some((inside, config)); + let (specified_req, version_req) = match version { + Some(v) => (true, parse_req_with_deprecated(v, arg)?), + None => (false, VersionReq::any()) + }; + + let mut ret = Dependency::new_override(name, source_id); + { + let ptr = Rc::make_mut(&mut ret.inner); + ptr.only_match_name = false; + ptr.req = version_req; + ptr.specified_req = specified_req; + } + Ok(ret) + } + + /// Attempt to create a `Dependency` from an entry in the manifest. + pub fn parse_no_deprecated(name: &str, + version: Option<&str>, + source_id: &SourceId) -> CargoResult { + let (specified_req, version_req) = match version { + Some(v) => (true, parse_req_with_deprecated(v, None)?), + None => (false, VersionReq::any()) + }; + + let mut ret = Dependency::new_override(name, source_id); + { + let ptr = Rc::make_mut(&mut ret.inner); + ptr.only_match_name = false; + ptr.req = version_req; + ptr.specified_req = specified_req; + } + Ok(ret) + } + + pub fn new_override(name: &str, source_id: &SourceId) -> Dependency { + Dependency { + inner: Rc::new(Inner { + name: name.to_string(), + source_id: source_id.clone(), + req: VersionReq::any(), + kind: Kind::Normal, + only_match_name: true, + optional: false, + features: Vec::new(), + default_features: true, + specified_req: false, + platform: None, + }), + } + } + + pub fn version_req(&self) -> &VersionReq { + &self.inner.req + } + + pub fn name(&self) -> &str { + &self.inner.name + } + + pub fn source_id(&self) -> &SourceId { + &self.inner.source_id + } + + pub fn kind(&self) -> Kind { + self.inner.kind + } + + pub fn specified_req(&self) -> bool { + self.inner.specified_req + } + + /// If none, this dependencies must be built for all platforms. + /// If some, it must only be built for the specified platform. + pub fn platform(&self) -> Option<&Platform> { + self.inner.platform.as_ref() + } + + pub fn set_kind(&mut self, kind: Kind) -> &mut Dependency { + Rc::make_mut(&mut self.inner).kind = kind; + self + } + + /// Sets the list of features requested for the package. + pub fn set_features(&mut self, features: Vec) -> &mut Dependency { + Rc::make_mut(&mut self.inner).features = features; + self + } + + /// Sets whether the dependency requests default features of the package. + pub fn set_default_features(&mut self, default_features: bool) -> &mut Dependency { + Rc::make_mut(&mut self.inner).default_features = default_features; + self + } + + /// Sets whether the dependency is optional. 
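+    // Editorial note: these setters all go through `Rc::make_mut`, a
+    // copy-on-write pattern. Cloning a `Dependency` only bumps a reference
+    // count; the shared `Inner` is deep-cloned the first time an aliased
+    // handle is mutated. A minimal, self-contained sketch of the same idea
+    // (illustration only, not part of the vendored sources):
+    //
+    //     use std::rc::Rc;
+    //
+    //     fn main() {
+    //         let a = Rc::new(vec![1, 2, 3]);
+    //         let mut b = Rc::clone(&a);      // shared, no copy yet
+    //         Rc::make_mut(&mut b).push(4);   // aliased, so this clones
+    //         assert_eq!(*a, vec![1, 2, 3]);  // `a` is untouched
+    //         assert_eq!(*b, vec![1, 2, 3, 4]);
+    //     }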
+ pub fn set_optional(&mut self, optional: bool) -> &mut Dependency { + Rc::make_mut(&mut self.inner).optional = optional; + self + } + + /// Set the source id for this dependency + pub fn set_source_id(&mut self, id: SourceId) -> &mut Dependency { + Rc::make_mut(&mut self.inner).source_id = id; + self + } + + /// Set the version requirement for this dependency + pub fn set_version_req(&mut self, req: VersionReq) -> &mut Dependency { + Rc::make_mut(&mut self.inner).req = req; + self + } + + pub fn set_platform(&mut self, platform: Option) -> &mut Dependency { + Rc::make_mut(&mut self.inner).platform = platform; + self + } + + /// Lock this dependency to depending on the specified package id + pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency { + assert_eq!(self.inner.source_id, *id.source_id()); + assert!(self.inner.req.matches(id.version())); + self.set_version_req(VersionReq::exact(id.version())) + .set_source_id(id.source_id().clone()) + } + + /// Returns whether this is a "locked" dependency, basically whether it has + /// an exact version req. + pub fn is_locked(&self) -> bool { + // Kind of a hack to figure this out, but it works! + self.inner.req.to_string().starts_with('=') + } + + /// Returns false if the dependency is only used to build the local package. + pub fn is_transitive(&self) -> bool { + match self.inner.kind { + Kind::Normal | Kind::Build => true, + Kind::Development => false, + } + } + + pub fn is_build(&self) -> bool { + match self.inner.kind { + Kind::Build => true, + _ => false, + } + } + + pub fn is_optional(&self) -> bool { + self.inner.optional + } + + /// Returns true if the default features of the dependency are requested. + pub fn uses_default_features(&self) -> bool { + self.inner.default_features + } + /// Returns the list of features that are requested by the dependency. + pub fn features(&self) -> &[String] { + &self.inner.features + } + + /// Returns true if the package (`sum`) can fulfill this dependency request. + pub fn matches(&self, sum: &Summary) -> bool { + self.matches_id(sum.package_id()) + } + + /// Returns true if the package (`sum`) can fulfill this dependency request. + pub fn matches_ignoring_source(&self, sum: &Summary) -> bool { + self.name() == sum.package_id().name() && + self.version_req().matches(sum.package_id().version()) + } + + /// Returns true if the package (`id`) can fulfill this dependency request. 
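+    // Editorial sketch of the rule implemented below: an override dependency
+    // (`only_match_name`) matches on name alone, while a regular dependency
+    // also requires a satisfied version requirement and an identical source:
+    //
+    //     fn rule(name_ok: bool, only_name: bool, req_ok: bool, src_ok: bool) -> bool {
+    //         name_ok && (only_name || (req_ok && src_ok))
+    //     }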
+ pub fn matches_id(&self, id: &PackageId) -> bool { + self.inner.name == id.name() && + (self.inner.only_match_name || (self.inner.req.matches(id.version()) && + &self.inner.source_id == id.source_id())) + } + + pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) + -> Dependency { + if self.source_id() != to_replace { + self + } else { + self.set_source_id(replace_with.clone()); + self + } + } +} + +impl Platform { + pub fn matches(&self, name: &str, cfg: Option<&[Cfg]>) -> bool { + match *self { + Platform::Name(ref p) => p == name, + Platform::Cfg(ref p) => { + match cfg { + Some(cfg) => p.matches(cfg), + None => false, + } + } + } + } +} + +impl ser::Serialize for Platform { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer, + { + self.to_string().serialize(s) + } +} + +impl FromStr for Platform { + type Err = CargoError; + + fn from_str(s: &str) -> CargoResult { + if s.starts_with("cfg(") && s.ends_with(')') { + let s = &s[4..s.len()-1]; + s.parse().map(Platform::Cfg).chain_err(|| { + format!("failed to parse `{}` as a cfg expression", s) + }) + } else { + Ok(Platform::Name(s.to_string())) + } + } +} + +impl fmt::Display for Platform { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Platform::Name(ref n) => n.fmt(f), + Platform::Cfg(ref e) => write!(f, "cfg({})", e), + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/features.rs b/collector/compile-benchmarks/cargo/src/cargo/core/features.rs new file mode 100644 index 000000000..4bfafc174 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/features.rs @@ -0,0 +1,279 @@ +//! Support for nightly features in Cargo itself +//! +//! This file is the version of `feature_gate.rs` in upstream Rust for Cargo +//! itself and is intended to be the avenue for which new features in Cargo are +//! gated by default and then eventually stabilized. All known stable and +//! unstable features are tracked in this file. +//! +//! If you're reading this then you're likely interested in adding a feature to +//! Cargo, and the good news is that it shouldn't be too hard! To do this you'll +//! want to follow these steps: +//! +//! 1. Add your feature. Do this by searching for "look here" in this file and +//! expanding the macro invocation that lists all features with your new +//! feature. +//! +//! 2. Find the appropriate place to place the feature gate in Cargo itself. If +//! you're extending the manifest format you'll likely just want to modify +//! the `Manifest::feature_gate` function, but otherwise you may wish to +//! place the feature gate elsewhere in Cargo. +//! +//! 3. To actually perform the feature gate, you'll want to have code that looks +//! like: +//! +//! ```rust,ignore +//! use core::{Feature, Features}; +//! +//! let feature = Feature::launch_into_space(); +//! package.manifest().features().require(feature).chain_err(|| { +//! "launching Cargo into space right now is unstable and may result in \ +//! unintended damage to your codebase, use with caution" +//! })?; +//! ``` +//! +//! Notably you'll notice the `require` funciton called with your `Feature`, and +//! then you use `chain_err` to tack on more context for why the feature was +//! required when the feature isn't activated. +//! +//! And hopefully that's it! Bear with us though that this is, at the time of +//! this writing, a very new feature in Cargo. If the process differs from this +//! we'll be sure to update this documentation! 
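+
+// Editorial note: feature names are spelled with dashes in `Cargo.toml`
+// (e.g. `cargo-features = ["test-dummy-unstable"]`) but are Rust identifiers
+// in the `features!` macro below, so the `status` lookup normalizes dashes to
+// underscores and rejects names that already contain an underscore. A tiny
+// standalone sketch of that normalization:
+//
+//     fn main() {
+//         let manifest_name = "test-dummy-unstable";
+//         assert!(!manifest_name.contains('_'));
+//         assert_eq!(manifest_name.replace("-", "_"), "test_dummy_unstable");
+//     }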
+ +use std::env; + +use util::errors::CargoResult; + +enum Status { + Stable, + Unstable, +} + +macro_rules! features { + ( + pub struct Features { + $([$stab:ident] $feature:ident: bool,)* + } + ) => ( + #[derive(Default, Clone, Debug)] + pub struct Features { + $($feature: bool,)* + activated: Vec, + } + + impl Feature { + $( + pub fn $feature() -> &'static Feature { + fn get(features: &Features) -> bool { + features.$feature + } + static FEAT: Feature = Feature { + name: stringify!($feature), + get: get, + }; + &FEAT + } + )* + + fn is_enabled(&self, features: &Features) -> bool { + (self.get)(features) + } + } + + impl Features { + fn status(&mut self, feature: &str) -> Option<(&mut bool, Status)> { + if feature.contains("_") { + return None + } + let feature = feature.replace("-", "_"); + $( + if feature == stringify!($feature) { + return Some((&mut self.$feature, stab!($stab))) + } + )* + None + } + } + ) +} + +macro_rules! stab { + (stable) => (Status::Stable); + (unstable) => (Status::Unstable); +} + +/// A listing of all features in Cargo +/// +/// "look here" +/// +/// This is the macro that lists all stable and unstable features in Cargo. +/// You'll want to add to this macro whenever you add a feature to Cargo, also +/// following the directions above. +/// +/// Note that all feature names here are valid Rust identifiers, but the `_` +/// character is translated to `-` when specified in the `cargo-features` +/// manifest entry in `Cargo.toml`. +features! { + pub struct Features { + + // A dummy feature that doesn't actually gate anything, but it's used in + // testing to ensure that we can enable stable features. + [stable] test_dummy_stable: bool, + + // A dummy feature that gates the usage of the `im-a-teapot` manifest + // entry. This is basically just intended for tests. 
+ [unstable] test_dummy_unstable: bool, + } +} + +pub struct Feature { + name: &'static str, + get: fn(&Features) -> bool, +} + +impl Features { + pub fn new(features: &[String], + warnings: &mut Vec) -> CargoResult { + let mut ret = Features::default(); + for feature in features { + ret.add(feature, warnings)?; + ret.activated.push(feature.to_string()); + } + Ok(ret) + } + + fn add(&mut self, feature: &str, warnings: &mut Vec) -> CargoResult<()> { + let (slot, status) = match self.status(feature) { + Some(p) => p, + None => bail!("unknown cargo feature `{}`", feature), + }; + + if *slot { + bail!("the cargo feature `{}` has already been activated", feature); + } + + match status { + Status::Stable => { + let warning = format!("the cargo feature `{}` is now stable \ + and is no longer necessary to be listed \ + in the manifest", feature); + warnings.push(warning); + } + Status::Unstable if !nightly_features_allowed() => { + bail!("the cargo feature `{}` requires a nightly version of \ + Cargo, but this is the `{}` channel", + feature, + channel()) + } + Status::Unstable => {} + } + + *slot = true; + + Ok(()) + } + + pub fn activated(&self) -> &[String] { + &self.activated + } + + pub fn require(&self, feature: &Feature) -> CargoResult<()> { + if feature.is_enabled(self) { + Ok(()) + } else { + let feature = feature.name.replace("_", "-"); + let mut msg = format!("feature `{}` is required", feature); + + if nightly_features_allowed() { + let s = format!("\n\nconsider adding `cargo-features = [\"{0}\"]` \ + to the manifest", feature); + msg.push_str(&s); + } else { + let s = format!("\n\n\ + this Cargo does not support nightly features, but if you\n\ + switch to nightly channel you can add\n\ + `cargo-features = [\"{}\"]` to enable this feature", + feature); + msg.push_str(&s); + } + bail!("{}", msg); + } + } +} + +/// A parsed representation of all unstable flags that Cargo accepts. +/// +/// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for +/// gating unstable functionality to Cargo. These flags are only available on +/// the nightly channel of Cargo. +/// +/// This struct doesn't have quite the same convenience macro that the features +/// have above, but the procedure should still be relatively stable for adding a +/// new unstable flag: +/// +/// 1. First, add a field to this `CliUnstable` structure. All flags are allowed +/// to have a value as the `-Z` flags are either of the form `-Z foo` or +/// `-Z foo=bar`, and it's up to you how to parse `bar`. +/// +/// 2. Add an arm to the match statement in `CliUnstable::add` below to match on +/// your new flag. The key (`k`) is what you're matching on and the value is +/// in `v`. +/// +/// 3. (optional) Add a new parsing function to parse your datatype. As of now +/// there's an example for `bool`, but more can be added! +/// +/// 4. In Cargo use `config.cli_unstable()` to get a reference to this structure +/// and then test for your flag or your value and act accordingly. +/// +/// If you have any trouble with this, please let us know! 
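+
+// A self-contained sketch (editorial illustration) of the flag-splitting
+// strategy used by `CliUnstable::add` below: each `-Z` flag is either a bare
+// `key` or a `key=value` pair, split on the first `=` only:
+//
+//     fn main() {
+//         for flag in &["print-im-a-teapot", "print-im-a-teapot=yes"] {
+//             let mut parts = flag.splitn(2, '=');
+//             let k = parts.next().unwrap(); // the key is always present
+//             let v = parts.next();          // `None` for the bare form
+//             println!("key = {:?}, value = {:?}", k, v);
+//         }
+//     }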
+#[derive(Default, Debug)] +pub struct CliUnstable { + pub print_im_a_teapot: bool, +} + +impl CliUnstable { + pub fn parse(&mut self, flags: &[String]) -> CargoResult<()> { + if !flags.is_empty() && !nightly_features_allowed() { + bail!("the `-Z` flag is only accepted on the nightly channel of Cargo") + } + for flag in flags { + self.add(flag)?; + } + Ok(()) + } + + fn add(&mut self, flag: &str) -> CargoResult<()> { + let mut parts = flag.splitn(2, '='); + let k = parts.next().unwrap(); + let v = parts.next(); + + fn parse_bool(value: Option<&str>) -> CargoResult { + match value { + None | + Some("yes") => Ok(true), + Some("no") => Ok(false), + Some(s) => bail!("expected `no` or `yes`, found: {}", s), + } + } + + match k { + "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(v)?, + _ => bail!("unknown `-Z` flag specified: {}", k), + } + + Ok(()) + } +} + +fn channel() -> String { + env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS").unwrap_or_else(|_| { + ::version().cfg_info.map(|c| c.release_channel) + .unwrap_or_else(|| String::from("dev")) + }) +} + +fn nightly_features_allowed() -> bool { + match &channel()[..] { + "nightly" | "dev" => true, + _ => false, + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/manifest.rs b/collector/compile-benchmarks/cargo/src/cargo/core/manifest.rs new file mode 100644 index 000000000..d8c3710ed --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/manifest.rs @@ -0,0 +1,709 @@ +use std::collections::{HashMap, BTreeMap}; +use std::fmt; +use std::path::{PathBuf, Path}; +use std::rc::Rc; + +use semver::Version; +use serde::ser; +use url::Url; + +use core::{Dependency, PackageId, Summary, SourceId, PackageIdSpec}; +use core::{WorkspaceConfig, Features, Feature}; +use util::Config; +use util::toml::TomlManifest; +use util::errors::*; + +pub enum EitherManifest { + Real(Manifest), + Virtual(VirtualManifest), +} + +/// Contains all the information about a package, as loaded from a Cargo.toml. +#[derive(Clone, Debug)] +pub struct Manifest { + summary: Summary, + targets: Vec, + links: Option, + warnings: Vec, + exclude: Vec, + include: Vec, + metadata: ManifestMetadata, + profiles: Profiles, + publish: bool, + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + original: Rc, + features: Features, + im_a_teapot: Option, +} + +/// When parsing `Cargo.toml`, some warnings should silenced +/// if the manifest comes from a dependency. `ManifestWarning` +/// allows this delayed emission of warnings. +#[derive(Clone, Debug)] +pub struct DelayedWarning { + pub message: String, + pub is_critical: bool +} + +#[derive(Clone, Debug)] +pub struct VirtualManifest { + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + profiles: Profiles, +} + +/// General metadata about a package which is just blindly uploaded to the +/// registry. +/// +/// Note that many of these fields can contain invalid values such as the +/// homepage, repository, documentation, or license. These fields are not +/// validated by cargo itself, but rather it is up to the registry when uploaded +/// to validate these fields. Cargo will itself accept any valid TOML +/// specification for these values. 
+#[derive(PartialEq, Clone, Debug)] +pub struct ManifestMetadata { + pub authors: Vec, + pub keywords: Vec, + pub categories: Vec, + pub license: Option, + pub license_file: Option, + pub description: Option, // not markdown + pub readme: Option, // file, not contents + pub homepage: Option, // url + pub repository: Option, // url + pub documentation: Option, // url + pub badges: BTreeMap>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum LibKind { + Lib, + Rlib, + Dylib, + ProcMacro, + Other(String), +} + +impl LibKind { + pub fn from_str(string: &str) -> LibKind { + match string { + "lib" => LibKind::Lib, + "rlib" => LibKind::Rlib, + "dylib" => LibKind::Dylib, + "proc-macro" => LibKind::ProcMacro, + s => LibKind::Other(s.to_string()), + } + } + + /// Returns the argument suitable for `--crate-type` to pass to rustc. + pub fn crate_type(&self) -> &str { + match *self { + LibKind::Lib => "lib", + LibKind::Rlib => "rlib", + LibKind::Dylib => "dylib", + LibKind::ProcMacro => "proc-macro", + LibKind::Other(ref s) => s, + } + } + + pub fn linkable(&self) -> bool { + match *self { + LibKind::Lib | + LibKind::Rlib | + LibKind::Dylib | + LibKind::ProcMacro => true, + LibKind::Other(..) => false, + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum TargetKind { + Lib(Vec), + Bin, + Test, + Bench, + ExampleLib(Vec), + ExampleBin, + CustomBuild, +} + +impl ser::Serialize for TargetKind { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer, + { + use self::TargetKind::*; + match *self { + Lib(ref kinds) => kinds.iter().map(LibKind::crate_type).collect(), + Bin => vec!["bin"], + ExampleBin | ExampleLib(_) => vec!["example"], + Test => vec!["test"], + CustomBuild => vec!["custom-build"], + Bench => vec!["bench"] + }.serialize(s) + } +} + + +// Note that most of the fields here are skipped when serializing because we +// don't want to export them just yet (becomes a public API of Cargo). Others +// though are definitely needed! +#[derive(Clone, PartialEq, Eq, Debug, Hash, Serialize)] +pub struct Profile { + pub opt_level: String, + #[serde(skip_serializing)] + pub lto: bool, + #[serde(skip_serializing)] + pub codegen_units: Option, // None = use rustc default + #[serde(skip_serializing)] + pub rustc_args: Option>, + #[serde(skip_serializing)] + pub rustdoc_args: Option>, + pub debuginfo: Option, + pub debug_assertions: bool, + pub overflow_checks: bool, + #[serde(skip_serializing)] + pub rpath: bool, + pub test: bool, + #[serde(skip_serializing)] + pub doc: bool, + #[serde(skip_serializing)] + pub run_custom_build: bool, + #[serde(skip_serializing)] + pub check: bool, + #[serde(skip_serializing)] + pub panic: Option, +} + +#[derive(Default, Clone, Debug, PartialEq, Eq)] +pub struct Profiles { + pub release: Profile, + pub dev: Profile, + pub test: Profile, + pub test_deps: Profile, + pub bench: Profile, + pub bench_deps: Profile, + pub doc: Profile, + pub custom_build: Profile, + pub check: Profile, + pub doctest: Profile, +} + +/// Information about a binary, a library, an example, etc. that is part of the +/// package. +#[derive(Clone, Hash, PartialEq, Eq, Debug)] +pub struct Target { + kind: TargetKind, + name: String, + src_path: PathBuf, + required_features: Option>, + tested: bool, + benched: bool, + doc: bool, + doctest: bool, + harness: bool, // whether to use the test harness (--test) + for_host: bool, +} + +#[derive(Serialize)] +struct SerializedTarget<'a> { + /// Is this a `--bin bin`, `--lib`, `--example ex`? 
+ /// Serialized as a list of strings for historical reasons. + kind: &'a TargetKind, + /// Corresponds to `--crate-type` compiler attribute. + /// See https://doc.rust-lang.org/reference.html#linkage + crate_types: Vec<&'a str>, + name: &'a str, + src_path: &'a PathBuf, +} + +impl ser::Serialize for Target { + fn serialize(&self, s: S) -> Result { + SerializedTarget { + kind: &self.kind, + crate_types: self.rustc_crate_types(), + name: &self.name, + src_path: &self.src_path, + }.serialize(s) + } +} + +impl Manifest { + pub fn new(summary: Summary, + targets: Vec, + exclude: Vec, + include: Vec, + links: Option, + metadata: ManifestMetadata, + profiles: Profiles, + publish: bool, + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + features: Features, + im_a_teapot: Option, + original: Rc) -> Manifest { + Manifest { + summary: summary, + targets: targets, + warnings: Vec::new(), + exclude: exclude, + include: include, + links: links, + metadata: metadata, + profiles: profiles, + publish: publish, + replace: replace, + patch: patch, + workspace: workspace, + features: features, + original: original, + im_a_teapot: im_a_teapot, + } + } + + pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } + pub fn exclude(&self) -> &[String] { &self.exclude } + pub fn include(&self) -> &[String] { &self.include } + pub fn metadata(&self) -> &ManifestMetadata { &self.metadata } + pub fn name(&self) -> &str { self.package_id().name() } + pub fn package_id(&self) -> &PackageId { self.summary.package_id() } + pub fn summary(&self) -> &Summary { &self.summary } + pub fn targets(&self) -> &[Target] { &self.targets } + pub fn version(&self) -> &Version { self.package_id().version() } + pub fn warnings(&self) -> &[DelayedWarning] { &self.warnings } + pub fn profiles(&self) -> &Profiles { &self.profiles } + pub fn publish(&self) -> bool { self.publish } + pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace } + pub fn original(&self) -> &TomlManifest { &self.original } + pub fn patch(&self) -> &HashMap> { &self.patch } + pub fn links(&self) -> Option<&str> { + self.links.as_ref().map(|s| &s[..]) + } + + pub fn workspace_config(&self) -> &WorkspaceConfig { + &self.workspace + } + + pub fn features(&self) -> &Features { + &self.features + } + + pub fn add_warning(&mut self, s: String) { + self.warnings.push(DelayedWarning { message: s, is_critical: false }) + } + + pub fn add_critical_warning(&mut self, s: String) { + self.warnings.push(DelayedWarning { message: s, is_critical: true }) + } + + pub fn set_summary(&mut self, summary: Summary) { + self.summary = summary; + } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Manifest { + Manifest { + summary: self.summary.map_source(to_replace, replace_with), + ..self + } + } + + pub fn feature_gate(&self) -> CargoResult<()> { + if self.im_a_teapot.is_some() { + self.features.require(Feature::test_dummy_unstable()).chain_err(|| { + "the `im-a-teapot` manifest key is unstable and may not work \ + properly in England" + })?; + } + + Ok(()) + } + + // Just a helper function to test out `-Z` flags on Cargo + pub fn print_teapot(&self, config: &Config) { + if let Some(teapot) = self.im_a_teapot { + if config.cli_unstable().print_im_a_teapot { + println!("im-a-teapot = {}", teapot); + } + } + } +} + +impl VirtualManifest { + pub fn new(replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + profiles: Profiles) -> 
VirtualManifest { + VirtualManifest { + replace: replace, + patch: patch, + workspace: workspace, + profiles: profiles, + } + } + + pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { + &self.replace + } + + pub fn patch(&self) -> &HashMap> { + &self.patch + } + + pub fn workspace_config(&self) -> &WorkspaceConfig { + &self.workspace + } + + pub fn profiles(&self) -> &Profiles { + &self.profiles + } +} + +impl Target { + fn with_path(src_path: PathBuf) -> Target { + assert!(src_path.is_absolute()); + Target { + kind: TargetKind::Bin, + name: String::new(), + src_path: src_path, + required_features: None, + doc: false, + doctest: false, + harness: true, + for_host: false, + tested: true, + benched: true, + } + } + + pub fn lib_target(name: &str, + crate_targets: Vec, + src_path: PathBuf) -> Target { + Target { + kind: TargetKind::Lib(crate_targets), + name: name.to_string(), + doctest: true, + doc: true, + ..Target::with_path(src_path) + } + } + + pub fn bin_target(name: &str, src_path: PathBuf, + required_features: Option>) -> Target { + Target { + kind: TargetKind::Bin, + name: name.to_string(), + required_features: required_features, + doc: true, + ..Target::with_path(src_path) + } + } + + /// Builds a `Target` corresponding to the `build = "build.rs"` entry. + pub fn custom_build_target(name: &str, src_path: PathBuf) -> Target { + Target { + kind: TargetKind::CustomBuild, + name: name.to_string(), + for_host: true, + benched: false, + tested: false, + ..Target::with_path(src_path) + } + } + + pub fn example_target(name: &str, + crate_targets: Vec, + src_path: PathBuf, + required_features: Option>) -> Target { + let kind = if crate_targets.is_empty() { + TargetKind::ExampleBin + } else { + TargetKind::ExampleLib(crate_targets) + }; + + Target { + kind: kind, + name: name.to_string(), + required_features: required_features, + benched: false, + ..Target::with_path(src_path) + } + } + + pub fn test_target(name: &str, src_path: PathBuf, + required_features: Option>) -> Target { + Target { + kind: TargetKind::Test, + name: name.to_string(), + required_features: required_features, + benched: false, + ..Target::with_path(src_path) + } + } + + pub fn bench_target(name: &str, src_path: PathBuf, + required_features: Option>) -> Target { + Target { + kind: TargetKind::Bench, + name: name.to_string(), + required_features: required_features, + tested: false, + ..Target::with_path(src_path) + } + } + + pub fn name(&self) -> &str { &self.name } + pub fn crate_name(&self) -> String { self.name.replace("-", "_") } + pub fn src_path(&self) -> &Path { &self.src_path } + pub fn required_features(&self) -> Option<&Vec> { self.required_features.as_ref() } + pub fn kind(&self) -> &TargetKind { &self.kind } + pub fn tested(&self) -> bool { self.tested } + pub fn harness(&self) -> bool { self.harness } + pub fn documented(&self) -> bool { self.doc } + pub fn for_host(&self) -> bool { self.for_host } + pub fn benched(&self) -> bool { self.benched } + + pub fn doctested(&self) -> bool { + self.doctest && match self.kind { + TargetKind::Lib(ref kinds) => { + kinds.iter().any(|k| { + *k == LibKind::Rlib || + *k == LibKind::Lib || + *k == LibKind::ProcMacro + }) + } + _ => false, + } + } + + pub fn allows_underscores(&self) -> bool { + self.is_bin() || self.is_example() || self.is_custom_build() + } + + pub fn is_lib(&self) -> bool { + match self.kind { + TargetKind::Lib(_) => true, + _ => false + } + } + + pub fn is_dylib(&self) -> bool { + match self.kind { + TargetKind::Lib(ref libs) => libs.iter().any(|l| 
*l == LibKind::Dylib), + _ => false + } + } + + pub fn is_cdylib(&self) -> bool { + let libs = match self.kind { + TargetKind::Lib(ref libs) => libs, + _ => return false + }; + libs.iter().any(|l| { + match *l { + LibKind::Other(ref s) => s == "cdylib", + _ => false, + } + }) + } + + pub fn linkable(&self) -> bool { + match self.kind { + TargetKind::Lib(ref kinds) => { + kinds.iter().any(|k| k.linkable()) + } + _ => false + } + } + + pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin } + + pub fn is_example(&self) -> bool { + match self.kind { + TargetKind::ExampleBin | + TargetKind::ExampleLib(..) => true, + _ => false + } + } + + pub fn is_bin_example(&self) -> bool { + // Needed for --all-examples in contexts where only runnable examples make sense + match self.kind { + TargetKind::ExampleBin => true, + _ => false + } + } + + pub fn is_test(&self) -> bool { self.kind == TargetKind::Test } + pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench } + pub fn is_custom_build(&self) -> bool { self.kind == TargetKind::CustomBuild } + + /// Returns the arguments suitable for `--crate-type` to pass to rustc. + pub fn rustc_crate_types(&self) -> Vec<&str> { + match self.kind { + TargetKind::Lib(ref kinds) | + TargetKind::ExampleLib(ref kinds) => { + kinds.iter().map(LibKind::crate_type).collect() + } + TargetKind::CustomBuild | + TargetKind::Bench | + TargetKind::Test | + TargetKind::ExampleBin | + TargetKind::Bin => vec!["bin"], + } + } + + pub fn can_lto(&self) -> bool { + match self.kind { + TargetKind::Lib(ref v) => { + !v.contains(&LibKind::Rlib) && + !v.contains(&LibKind::Dylib) && + !v.contains(&LibKind::Lib) + } + _ => true, + } + } + + pub fn set_tested(&mut self, tested: bool) -> &mut Target { + self.tested = tested; + self + } + pub fn set_benched(&mut self, benched: bool) -> &mut Target { + self.benched = benched; + self + } + pub fn set_doctest(&mut self, doctest: bool) -> &mut Target { + self.doctest = doctest; + self + } + pub fn set_for_host(&mut self, for_host: bool) -> &mut Target { + self.for_host = for_host; + self + } + pub fn set_harness(&mut self, harness: bool) -> &mut Target { + self.harness = harness; + self + } + pub fn set_doc(&mut self, doc: bool) -> &mut Target { + self.doc = doc; + self + } +} + +impl fmt::Display for Target { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.kind { + TargetKind::Lib(..) => write!(f, "Target(lib)"), + TargetKind::Bin => write!(f, "Target(bin: {})", self.name), + TargetKind::Test => write!(f, "Target(test: {})", self.name), + TargetKind::Bench => write!(f, "Target(bench: {})", self.name), + TargetKind::ExampleBin | + TargetKind::ExampleLib(..) 
=> write!(f, "Target(example: {})", self.name), + TargetKind::CustomBuild => write!(f, "Target(script)"), + } + } +} + +impl Profile { + pub fn default_dev() -> Profile { + Profile { + debuginfo: Some(2), + debug_assertions: true, + overflow_checks: true, + ..Profile::default() + } + } + + pub fn default_release() -> Profile { + Profile { + opt_level: "3".to_string(), + debuginfo: None, + ..Profile::default() + } + } + + pub fn default_test() -> Profile { + Profile { + test: true, + ..Profile::default_dev() + } + } + + pub fn default_bench() -> Profile { + Profile { + test: true, + ..Profile::default_release() + } + } + + pub fn default_doc() -> Profile { + Profile { + doc: true, + ..Profile::default_dev() + } + } + + pub fn default_custom_build() -> Profile { + Profile { + run_custom_build: true, + ..Profile::default_dev() + } + } + + pub fn default_check() -> Profile { + Profile { + check: true, + ..Profile::default_dev() + } + } + + pub fn default_doctest() -> Profile { + Profile { + doc: true, + test: true, + ..Profile::default_dev() + } + } +} + +impl Default for Profile { + fn default() -> Profile { + Profile { + opt_level: "0".to_string(), + lto: false, + codegen_units: None, + rustc_args: None, + rustdoc_args: None, + debuginfo: None, + debug_assertions: false, + overflow_checks: false, + rpath: false, + test: false, + doc: false, + run_custom_build: false, + check: false, + panic: None, + } + } +} + +impl fmt::Display for Profile { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if self.test { + write!(f, "Profile(test)") + } else if self.doc { + write!(f, "Profile(doc)") + } else if self.run_custom_build { + write!(f, "Profile(run)") + } else if self.check { + write!(f, "Profile(check)") + } else { + write!(f, "Profile(build)") + } + + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/core/mod.rs new file mode 100644 index 000000000..6b4e3906f --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/mod.rs @@ -0,0 +1,26 @@ +pub use self::dependency::Dependency; +pub use self::features::{Features, Feature, CliUnstable}; +pub use self::manifest::{EitherManifest, VirtualManifest}; +pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles}; +pub use self::package::{Package, PackageSet}; +pub use self::package_id::PackageId; +pub use self::package_id_spec::PackageIdSpec; +pub use self::registry::Registry; +pub use self::resolver::Resolve; +pub use self::shell::{Shell, Verbosity}; +pub use self::source::{Source, SourceId, SourceMap, GitReference}; +pub use self::summary::Summary; +pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig}; + +pub mod source; +pub mod package; +pub mod package_id; +pub mod dependency; +pub mod manifest; +pub mod resolver; +pub mod summary; +pub mod shell; +pub mod registry; +mod package_id_spec; +mod workspace; +mod features; diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/package.rs b/collector/compile-benchmarks/cargo/src/cargo/core/package.rs new file mode 100644 index 000000000..885dad937 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/package.rs @@ -0,0 +1,219 @@ +use std::cell::{Ref, RefCell}; +use std::collections::{HashMap, BTreeMap}; +use std::fmt; +use std::hash; +use std::path::{Path, PathBuf}; + +use semver::Version; +use serde::ser; +use toml; + +use core::{Dependency, Manifest, PackageId, SourceId, Target}; +use core::{Summary, SourceMap}; +use ops; +use 
util::{Config, LazyCell, internal, lev_distance}; +use util::errors::{CargoResult, CargoResultExt}; + +/// Information about a package that is available somewhere in the file system. +/// +/// A package is a `Cargo.toml` file plus all the files that are part of it. +// TODO: Is manifest_path a relic? +#[derive(Clone, Debug)] +pub struct Package { + /// The package's manifest + manifest: Manifest, + /// The root of the package + manifest_path: PathBuf, +} + +/// A Package in a form where `Serialize` can be derived. +#[derive(Serialize)] +struct SerializedPackage<'a> { + name: &'a str, + version: &'a str, + id: &'a PackageId, + license: Option<&'a str>, + license_file: Option<&'a str>, + description: Option<&'a str>, + source: &'a SourceId, + dependencies: &'a [Dependency], + targets: &'a [Target], + features: &'a BTreeMap>, + manifest_path: &'a str, +} + +impl ser::Serialize for Package { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer, + { + let summary = self.manifest.summary(); + let package_id = summary.package_id(); + let manmeta = self.manifest.metadata(); + let license = manmeta.license.as_ref().map(String::as_ref); + let license_file = manmeta.license_file.as_ref().map(String::as_ref); + let description = manmeta.description.as_ref().map(String::as_ref); + + SerializedPackage { + name: package_id.name(), + version: &package_id.version().to_string(), + id: package_id, + license: license, + license_file: license_file, + description: description, + source: summary.source_id(), + dependencies: summary.dependencies(), + targets: self.manifest.targets(), + features: summary.features(), + manifest_path: &self.manifest_path.display().to_string(), + }.serialize(s) + } +} + +impl Package { + /// Create a package from a manifest and its location + pub fn new(manifest: Manifest, + manifest_path: &Path) -> Package { + Package { + manifest: manifest, + manifest_path: manifest_path.to_path_buf(), + } + } + + /// Calculate the Package from the manifest path (and cargo configuration). 
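+    // Hypothetical usage (editorial sketch; `manifest_path` and `config` are
+    // assumed to be supplied by the caller, and an absolute manifest path is
+    // assumed):
+    //
+    //     let pkg = Package::for_path(&manifest_path, &config)?;
+    //     println!("{} v{}", pkg.name(), pkg.version());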
+ pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult { + let path = manifest_path.parent().unwrap(); + let source_id = SourceId::for_path(path)?; + let (pkg, _) = ops::read_package(manifest_path, &source_id, config)?; + Ok(pkg) + } + + /// Get the manifest dependencies + pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() } + /// Get the manifest + pub fn manifest(&self) -> &Manifest { &self.manifest } + /// Get the path to the manifest + pub fn manifest_path(&self) -> &Path { &self.manifest_path } + /// Get the name of the package + pub fn name(&self) -> &str { self.package_id().name() } + /// Get the PackageId object for the package (fully defines a packge) + pub fn package_id(&self) -> &PackageId { self.manifest.package_id() } + /// Get the root folder of the package + pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() } + /// Get the summary for the package + pub fn summary(&self) -> &Summary { self.manifest.summary() } + /// Get the targets specified in the manifest + pub fn targets(&self) -> &[Target] { self.manifest.targets() } + /// Get the current package version + pub fn version(&self) -> &Version { self.package_id().version() } + /// Get the package authors + pub fn authors(&self) -> &Vec { &self.manifest.metadata().authors } + /// Whether the package is set to publish + pub fn publish(&self) -> bool { self.manifest.publish() } + + /// Whether the package uses a custom build script for any target + pub fn has_custom_build(&self) -> bool { + self.targets().iter().any(|t| t.is_custom_build()) + } + + pub fn find_closest_target(&self, + target: &str, + is_expected_kind: fn(&Target)-> bool) -> Option<&Target> { + let targets = self.targets(); + + let matches = targets.iter().filter(|t| is_expected_kind(t)) + .map(|t| (lev_distance(target, t.name()), t)) + .filter(|&(d, _)| d < 4); + matches.min_by_key(|t| t.0).map(|t| t.1) + } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Package { + Package { + manifest: self.manifest.map_source(to_replace, replace_with), + manifest_path: self.manifest_path, + } + } + + pub fn to_registry_toml(&self) -> String { + let manifest = self.manifest().original().prepare_for_publish(); + let toml = toml::to_string(&manifest).unwrap(); + format!("\ + # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\ + #\n\ + # When uploading crates to the registry Cargo will automatically\n\ + # \"normalize\" Cargo.toml files for maximal compatibility\n\ + # with all versions of Cargo and also rewrite `path` dependencies\n\ + # to registry (e.g. crates.io) dependencies\n\ + #\n\ + # If you believe there's an error in this file please file an\n\ + # issue against the rust-lang/cargo repository. 
If you're\n\ + # editing this file be aware that the upstream Cargo.toml\n\ + # will likely look very different (and much more reasonable)\n\ + \n\ + {}\ + ", toml) + } +} + +impl fmt::Display for Package { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.summary().package_id()) + } +} + +impl PartialEq for Package { + fn eq(&self, other: &Package) -> bool { + self.package_id() == other.package_id() + } +} + +impl Eq for Package {} + +impl hash::Hash for Package { + fn hash(&self, into: &mut H) { + self.package_id().hash(into) + } +} + +pub struct PackageSet<'cfg> { + packages: HashMap>, + sources: RefCell>, +} + +impl<'cfg> PackageSet<'cfg> { + pub fn new(package_ids: &[PackageId], + sources: SourceMap<'cfg>) -> PackageSet<'cfg> { + PackageSet { + packages: package_ids.iter().map(|id| { + (id.clone(), LazyCell::new()) + }).collect(), + sources: RefCell::new(sources), + } + } + + pub fn package_ids<'a>(&'a self) -> Box + 'a> { + Box::new(self.packages.keys()) + } + + pub fn get(&self, id: &PackageId) -> CargoResult<&Package> { + let slot = self.packages.get(id).ok_or_else(|| { + internal(format!("couldn't find `{}` in package set", id)) + })?; + if let Some(pkg) = slot.borrow() { + return Ok(pkg) + } + let mut sources = self.sources.borrow_mut(); + let source = sources.get_mut(id.source_id()).ok_or_else(|| { + internal(format!("couldn't find source for `{}`", id)) + })?; + let pkg = source.download(id).chain_err(|| { + "unable to get packages from source" + })?; + assert!(slot.fill(pkg).is_ok()); + Ok(slot.borrow().unwrap()) + } + + pub fn sources(&self) -> Ref> { + self.sources.borrow() + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/package_id.rs b/collector/compile-benchmarks/cargo/src/cargo/core/package_id.rs new file mode 100644 index 000000000..908798555 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/package_id.rs @@ -0,0 +1,190 @@ +use std::cmp::Ordering; +use std::fmt::{self, Formatter}; +use std::hash::Hash; +use std::hash; +use std::path::Path; +use std::sync::Arc; + +use semver; +use serde::de; +use serde::ser; + +use util::{CargoResult, ToSemver}; +use core::source::SourceId; + +/// Identifier for a specific version of a package in a specific source. 
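+///
+/// A sketch of constructing one (assumes a `SourceId` named `source_id` is
+/// in scope; the name and version are hypothetical):
+///
+/// ```ignore
+/// let id = PackageId::new("foo", "1.2.3", &source_id)?;
+/// assert_eq!(id.name(), "foo");
+/// // `Display` renders this as `foo v1.2.3`, plus the source for
+/// // non-default registries.
+/// ```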
+#[derive(Clone)] +pub struct PackageId { + inner: Arc, +} + +#[derive(PartialEq, PartialOrd, Eq, Ord)] +struct PackageIdInner { + name: String, + version: semver::Version, + source_id: SourceId, +} + +impl ser::Serialize for PackageId { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer + { + s.collect_str(&format_args!("{} {} ({})", + self.inner.name, + self.inner.version, + self.inner.source_id.to_url())) + } +} + +impl<'de> de::Deserialize<'de> for PackageId { + fn deserialize(d: D) -> Result + where D: de::Deserializer<'de> + { + let string = String::deserialize(d)?; + let mut s = string.splitn(3, ' '); + let name = s.next().unwrap(); + let version = match s.next() { + Some(s) => s, + None => return Err(de::Error::custom("invalid serialized PackageId")), + }; + let version = semver::Version::parse(version) + .map_err(de::Error::custom)?; + let url = match s.next() { + Some(s) => s, + None => return Err(de::Error::custom("invalid serialized PackageId")), + }; + let url = if url.starts_with('(') && url.ends_with(')') { + &url[1..url.len() - 1] + } else { + return Err(de::Error::custom("invalid serialized PackageId")) + + }; + let source_id = SourceId::from_url(url).map_err(de::Error::custom)?; + + Ok(PackageId { + inner: Arc::new(PackageIdInner { + name: name.to_string(), + version: version, + source_id: source_id, + }), + }) + } +} + +impl Hash for PackageId { + fn hash(&self, state: &mut S) { + self.inner.name.hash(state); + self.inner.version.hash(state); + self.inner.source_id.hash(state); + } +} + +impl PartialEq for PackageId { + fn eq(&self, other: &PackageId) -> bool { + (*self.inner).eq(&*other.inner) + } +} +impl PartialOrd for PackageId { + fn partial_cmp(&self, other: &PackageId) -> Option { + (*self.inner).partial_cmp(&*other.inner) + } +} +impl Eq for PackageId {} +impl Ord for PackageId { + fn cmp(&self, other: &PackageId) -> Ordering { + (*self.inner).cmp(&*other.inner) + } +} + +impl PackageId { + pub fn new(name: &str, version: T, + sid: &SourceId) -> CargoResult { + let v = version.to_semver()?; + Ok(PackageId { + inner: Arc::new(PackageIdInner { + name: name.to_string(), + version: v, + source_id: sid.clone(), + }), + }) + } + + pub fn name(&self) -> &str { &self.inner.name } + pub fn version(&self) -> &semver::Version { &self.inner.version } + pub fn source_id(&self) -> &SourceId { &self.inner.source_id } + + pub fn with_precise(&self, precise: Option) -> PackageId { + PackageId { + inner: Arc::new(PackageIdInner { + name: self.inner.name.to_string(), + version: self.inner.version.clone(), + source_id: self.inner.source_id.with_precise(precise), + }), + } + } + + pub fn with_source_id(&self, source: &SourceId) -> PackageId { + PackageId { + inner: Arc::new(PackageIdInner { + name: self.inner.name.to_string(), + version: self.inner.version.clone(), + source_id: source.clone(), + }), + } + } + + pub fn stable_hash<'a>(&'a self, workspace: &'a Path) -> PackageIdStableHash<'a> { + PackageIdStableHash(self, workspace) + } +} + +pub struct PackageIdStableHash<'a>(&'a PackageId, &'a Path); + +impl<'a> Hash for PackageIdStableHash<'a> { + fn hash(&self, state: &mut S) { + self.0.inner.name.hash(state); + self.0.inner.version.hash(state); + self.0.inner.source_id.stable_hash(self.1, state); + } +} + +impl fmt::Display for PackageId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "{} v{}", self.inner.name, self.inner.version)?; + + if !self.inner.source_id.is_default_registry() { + write!(f, " ({})", self.inner.source_id)?; + } + + Ok(()) + } 
+} + +impl fmt::Debug for PackageId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + f.debug_struct("PackageId") + .field("name", &self.inner.name) + .field("version", &self.inner.version.to_string()) + .field("source", &self.inner.source_id.to_string()) + .finish() + } +} + +#[cfg(test)] +mod tests { + use super::PackageId; + use core::source::SourceId; + use sources::CRATES_IO; + use util::ToUrl; + + #[test] + fn invalid_version_handled_nicely() { + let loc = CRATES_IO.to_url().unwrap(); + let repo = SourceId::for_registry(&loc).unwrap(); + + assert!(PackageId::new("foo", "1.0", &repo).is_err()); + assert!(PackageId::new("foo", "1", &repo).is_err()); + assert!(PackageId::new("foo", "bar", &repo).is_err()); + assert!(PackageId::new("foo", "", &repo).is_err()); + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/package_id_spec.rs b/collector/compile-benchmarks/cargo/src/cargo/core/package_id_spec.rs new file mode 100644 index 000000000..d271f2f66 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/package_id_spec.rs @@ -0,0 +1,280 @@ +use std::collections::HashMap; +use std::fmt; + +use semver::Version; +use url::Url; + +use core::PackageId; +use util::{ToUrl, ToSemver}; +use util::errors::{CargoError, CargoResult, CargoResultExt}; + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct PackageIdSpec { + name: String, + version: Option, + url: Option, +} + +impl PackageIdSpec { + pub fn parse(spec: &str) -> CargoResult { + if spec.contains('/') { + if let Ok(url) = spec.to_url() { + return PackageIdSpec::from_url(url); + } + if !spec.contains("://") { + if let Ok(url) = Url::parse(&format!("cargo://{}", spec)) { + return PackageIdSpec::from_url(url); + } + } + } + let mut parts = spec.splitn(2, ':'); + let name = parts.next().unwrap(); + let version = match parts.next() { + Some(version) => Some(Version::parse(version)?), + None => None, + }; + for ch in name.chars() { + if !ch.is_alphanumeric() && ch != '_' && ch != '-' { + bail!("invalid character in pkgid `{}`: `{}`", spec, ch) + } + } + Ok(PackageIdSpec { + name: name.to_string(), + version: version, + url: None, + }) + } + + pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId> + where I: IntoIterator + { + let spec = PackageIdSpec::parse(spec).chain_err(|| { + format!("invalid package id specification: `{}`", spec) + })?; + spec.query(i) + } + + pub fn from_package_id(package_id: &PackageId) -> PackageIdSpec { + PackageIdSpec { + name: package_id.name().to_string(), + version: Some(package_id.version().clone()), + url: Some(package_id.source_id().url().clone()), + } + } + + fn from_url(mut url: Url) -> CargoResult { + if url.query().is_some() { + bail!("cannot have a query string in a pkgid: {}", url) + } + let frag = url.fragment().map(|s| s.to_owned()); + url.set_fragment(None); + let (name, version) = { + let mut path = url.path_segments().ok_or_else(|| { + CargoError::from(format!("pkgid urls must have a path: {}", url)) + })?; + let path_name = path.next_back().ok_or_else(|| { + CargoError::from(format!("pkgid urls must have at least one path \ + component: {}", url)) + })?; + match frag { + Some(fragment) => { + let mut parts = fragment.splitn(2, ':'); + let name_or_version = parts.next().unwrap(); + match parts.next() { + Some(part) => { + let version = part.to_semver()?; + (name_or_version.to_string(), Some(version)) + } + None => { + if name_or_version.chars().next().unwrap() + .is_alphabetic() { + (name_or_version.to_string(), None) + } else { + let version = 
name_or_version.to_semver()?; + (path_name.to_string(), Some(version)) + } + } + } + } + None => (path_name.to_string(), None), + } + }; + Ok(PackageIdSpec { + name: name, + version: version, + url: Some(url), + }) + } + + pub fn name(&self) -> &str { &self.name } + pub fn version(&self) -> Option<&Version> { self.version.as_ref() } + pub fn url(&self) -> Option<&Url> { self.url.as_ref() } + + pub fn set_url(&mut self, url: Url) { + self.url = Some(url); + } + + pub fn matches(&self, package_id: &PackageId) -> bool { + if self.name() != package_id.name() { return false } + + if let Some(ref v) = self.version { + if v != package_id.version() { + return false; + } + } + + match self.url { + Some(ref u) => u == package_id.source_id().url(), + None => true + } + } + + pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId> + where I: IntoIterator + { + let mut ids = i.into_iter().filter(|p| self.matches(*p)); + let ret = match ids.next() { + Some(id) => id, + None => bail!("package id specification `{}` \ + matched no packages", self), + }; + return match ids.next() { + Some(other) => { + let mut msg = format!("There are multiple `{}` packages in \ + your project, and the specification \ + `{}` is ambiguous.\n\ + Please re-run this command \ + with `-p ` where `` is one \ + of the following:", + self.name(), self); + let mut vec = vec![ret, other]; + vec.extend(ids); + minimize(&mut msg, &vec, self); + Err(msg.into()) + } + None => Ok(ret) + }; + + fn minimize(msg: &mut String, + ids: &[&PackageId], + spec: &PackageIdSpec) { + let mut version_cnt = HashMap::new(); + for id in ids { + *version_cnt.entry(id.version()).or_insert(0) += 1; + } + for id in ids { + if version_cnt[id.version()] == 1 { + msg.push_str(&format!("\n {}:{}", spec.name(), + id.version())); + } else { + msg.push_str(&format!("\n {}", + PackageIdSpec::from_package_id(*id))); + } + } + } + } +} + +impl fmt::Display for PackageIdSpec { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut printed_name = false; + match self.url { + Some(ref url) => { + if url.scheme() == "cargo" { + write!(f, "{}{}", url.host().unwrap(), url.path())?; + } else { + write!(f, "{}", url)?; + } + if url.path_segments().unwrap().next_back().unwrap() != self.name { + printed_name = true; + write!(f, "#{}", self.name)?; + } + } + None => { printed_name = true; write!(f, "{}", self.name)? 
} + } + if let Some(ref v) = self.version { + write!(f, "{}{}", if printed_name {":"} else {"#"}, v)?; + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use core::{PackageId, SourceId}; + use super::PackageIdSpec; + use url::Url; + use semver::Version; + + #[test] + fn good_parsing() { + fn ok(spec: &str, expected: PackageIdSpec) { + let parsed = PackageIdSpec::parse(spec).unwrap(); + assert_eq!(parsed, expected); + assert_eq!(parsed.to_string(), spec); + } + + ok("http://crates.io/foo#1.2.3", PackageIdSpec { + name: "foo".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: Some(Url::parse("http://crates.io/foo").unwrap()), + }); + ok("http://crates.io/foo#bar:1.2.3", PackageIdSpec { + name: "bar".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: Some(Url::parse("http://crates.io/foo").unwrap()), + }); + ok("crates.io/foo", PackageIdSpec { + name: "foo".to_string(), + version: None, + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }); + ok("crates.io/foo#1.2.3", PackageIdSpec { + name: "foo".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }); + ok("crates.io/foo#bar", PackageIdSpec { + name: "bar".to_string(), + version: None, + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }); + ok("crates.io/foo#bar:1.2.3", PackageIdSpec { + name: "bar".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }); + ok("foo", PackageIdSpec { + name: "foo".to_string(), + version: None, + url: None, + }); + ok("foo:1.2.3", PackageIdSpec { + name: "foo".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: None, + }); + } + + #[test] + fn bad_parsing() { + assert!(PackageIdSpec::parse("baz:").is_err()); + assert!(PackageIdSpec::parse("baz:*").is_err()); + assert!(PackageIdSpec::parse("baz:1.0").is_err()); + assert!(PackageIdSpec::parse("http://baz:1.0").is_err()); + assert!(PackageIdSpec::parse("http://#baz:1.0").is_err()); + } + + #[test] + fn matching() { + let url = Url::parse("http://example.com").unwrap(); + let sid = SourceId::for_registry(&url).unwrap(); + let foo = PackageId::new("foo", "1.2.3", &sid).unwrap(); + let bar = PackageId::new("bar", "1.2.3", &sid).unwrap(); + + assert!( PackageIdSpec::parse("foo").unwrap().matches(&foo)); + assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar)); + assert!( PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo)); + assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo)); + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/registry.rs b/collector/compile-benchmarks/cargo/src/cargo/core/registry.rs new file mode 100644 index 000000000..517486801 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/registry.rs @@ -0,0 +1,607 @@ +use std::collections::HashMap; + +use semver::VersionReq; +use url::Url; + +use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId}; +use core::PackageSet; +use util::{Config, profile}; +use util::errors::{CargoResult, CargoResultExt}; +use sources::config::SourceConfigMap; + +/// Source of information about a group of packages. +/// +/// See also `core::Source`. +pub trait Registry { + /// Attempt to find the packages that match a dependency request. 
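+ ///
+ /// A sketch of driving the callback-based API (this is exactly what the
+ /// `query_vec` helper below does; `dep` is assumed to be a `Dependency`
+ /// built elsewhere):
+ ///
+ /// ```ignore
+ /// let mut summaries = Vec::new();
+ /// registry.query(&dep, &mut |summary| summaries.push(summary))?;
+ /// ```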
+ fn query(&mut self, + dep: &Dependency, + f: &mut FnMut(Summary)) -> CargoResult<()>; + + fn query_vec(&mut self, dep: &Dependency) -> CargoResult> { + let mut ret = Vec::new(); + self.query(dep, &mut |s| ret.push(s))?; + Ok(ret) + } + + /// Returns whether or not this registry will return summaries with + /// checksums listed. + fn supports_checksums(&self) -> bool; + + /// Returns whether or not this registry will return summaries with + /// the `precise` field in the source id listed. + fn requires_precise(&self) -> bool; +} + +impl<'a, T: ?Sized + Registry + 'a> Registry for Box { + fn query(&mut self, + dep: &Dependency, + f: &mut FnMut(Summary)) -> CargoResult<()> { + (**self).query(dep, f) + } + + fn supports_checksums(&self) -> bool { + (**self).supports_checksums() + } + + fn requires_precise(&self) -> bool { + (**self).requires_precise() + } +} + +/// This structure represents a registry of known packages. It internally +/// contains a number of `Box` instances which are used to load a +/// `Package` from. +/// +/// The resolution phase of Cargo uses this to drive knowledge about new +/// packages as well as querying for lists of new packages. It is here that +/// sources are updated (e.g. network operations) and overrides are +/// handled. +/// +/// The general idea behind this registry is that it is centered around the +/// `SourceMap` structure, contained within which is a mapping of a `SourceId` to +/// a `Source`. Each `Source` in the map has been updated (using network +/// operations if necessary) and is ready to be queried for packages. +pub struct PackageRegistry<'cfg> { + sources: SourceMap<'cfg>, + + // A list of sources which are considered "overrides" which take precedent + // when querying for packages. + overrides: Vec, + + // Note that each SourceId does not take into account its `precise` field + // when hashing or testing for equality. When adding a new `SourceId`, we + // want to avoid duplicates in the `SourceMap` (to prevent re-updating the + // same git repo twice for example), but we also want to ensure that the + // loaded source is always updated. + // + // Sources with a `precise` field normally don't need to be updated because + // their contents are already on disk, but sources without a `precise` field + // almost always need to be updated. If we have a cached `Source` for a + // precise `SourceId`, then when we add a new `SourceId` that is not precise + // we want to ensure that the underlying source is updated. + // + // This is basically a long-winded way of saying that we want to know + // precisely what the keys of `sources` are, so this is a mapping of key to + // what exactly the key is. 
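+ //
+ // In other words: keys are compared/hashed without `precise`, and the
+ // value remembers the exact `SourceId` (and the `Kind` it was loaded
+ // with) that currently backs the entry in `sources`.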
+ source_ids: HashMap, + + locked: LockedMap, + source_config: SourceConfigMap<'cfg>, + patches: HashMap>, +} + +type LockedMap = HashMap)>>>; + +#[derive(PartialEq, Eq, Clone, Copy)] +enum Kind { + Override, + Locked, + Normal, +} + +impl<'cfg> PackageRegistry<'cfg> { + pub fn new(config: &'cfg Config) -> CargoResult> { + let source_config = SourceConfigMap::new(config)?; + Ok(PackageRegistry { + sources: SourceMap::new(), + source_ids: HashMap::new(), + overrides: Vec::new(), + source_config: source_config, + locked: HashMap::new(), + patches: HashMap::new(), + }) + } + + pub fn get(self, package_ids: &[PackageId]) -> PackageSet<'cfg> { + trace!("getting packages; sources={}", self.sources.len()); + PackageSet::new(package_ids, self.sources) + } + + fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> { + match self.source_ids.get(namespace) { + // We've previously loaded this source, and we've already locked it, + // so we're not allowed to change it even if `namespace` has a + // slightly different precise version listed. + Some(&(_, Kind::Locked)) => { + debug!("load/locked {}", namespace); + return Ok(()) + } + + // If the previous source was not a precise source, then we can be + // sure that it's already been updated if we've already loaded it. + Some(&(ref previous, _)) if previous.precise().is_none() => { + debug!("load/precise {}", namespace); + return Ok(()) + } + + // If the previous source has the same precise version as we do, + // then we're done, otherwise we need to need to move forward + // updating this source. + Some(&(ref previous, _)) => { + if previous.precise() == namespace.precise() { + debug!("load/match {}", namespace); + return Ok(()) + } + debug!("load/mismatch {}", namespace); + } + None => { + debug!("load/missing {}", namespace); + } + } + + self.load(namespace, kind)?; + Ok(()) + } + + pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> { + for id in ids.iter() { + self.ensure_loaded(id, Kind::Locked)?; + } + Ok(()) + } + + pub fn add_preloaded(&mut self, source: Box) { + self.add_source(source, Kind::Locked); + } + + fn add_source(&mut self, source: Box, kind: Kind) { + let id = source.source_id().clone(); + self.sources.insert(source); + self.source_ids.insert(id.clone(), (id, kind)); + } + + pub fn add_override(&mut self, source: Box) { + self.overrides.push(source.source_id().clone()); + self.add_source(source, Kind::Override); + } + + pub fn register_lock(&mut self, id: PackageId, deps: Vec) { + trace!("register_lock: {}", id); + for dep in deps.iter() { + trace!("\t-> {}", dep); + } + let sub_map = self.locked.entry(id.source_id().clone()) + .or_insert(HashMap::new()); + let sub_vec = sub_map.entry(id.name().to_string()) + .or_insert(Vec::new()); + sub_vec.push((id, deps)); + } + + pub fn patch(&mut self, url: &Url, deps: &[Dependency]) -> CargoResult<()> { + let deps = deps.iter().map(|dep| { + let mut summaries = self.query_vec(dep)?.into_iter(); + let summary = match summaries.next() { + Some(summary) => summary, + None => { + bail!("patch for `{}` in `{}` did not resolve to any crates", + dep.name(), url) + } + }; + if summaries.next().is_some() { + bail!("patch for `{}` in `{}` resolved to more than one candidate", + dep.name(), url) + } + if summary.package_id().source_id().url() == url { + bail!("patch for `{}` in `{}` points to the same source, but \ + patches must point to different sources", + dep.name(), url); + } + Ok(summary) + }).collect::>>().chain_err(|| { + format!("failed to resolve patches 
for `{}`", url) + })?; + + self.patches.insert(url.clone(), deps); + + Ok(()) + } + + pub fn patches(&self) -> &HashMap> { + &self.patches + } + + fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> { + (|| { + let source = self.source_config.load(source_id)?; + assert_eq!(source.source_id(), source_id); + + if kind == Kind::Override { + self.overrides.push(source_id.clone()); + } + self.add_source(source, kind); + + // Ensure the source has fetched all necessary remote data. + let _p = profile::start(format!("updating: {}", source_id)); + self.sources.get_mut(source_id).unwrap().update() + })().chain_err(|| format!("Unable to update {}", source_id)) + } + + fn query_overrides(&mut self, dep: &Dependency) + -> CargoResult> { + for s in self.overrides.iter() { + let src = self.sources.get_mut(s).unwrap(); + let dep = Dependency::new_override(dep.name(), s); + let mut results = src.query_vec(&dep)?; + if !results.is_empty() { + return Ok(Some(results.remove(0))) + } + } + Ok(None) + } + + /// This function is used to transform a summary to another locked summary + /// if possible. This is where the concept of a lockfile comes into play. + /// + /// If a summary points at a package id which was previously locked, then we + /// override the summary's id itself, as well as all dependencies, to be + /// rewritten to the locked versions. This will transform the summary's + /// source to a precise source (listed in the locked version) as well as + /// transforming all of the dependencies from range requirements on + /// imprecise sources to exact requirements on precise sources. + /// + /// If a summary does not point at a package id which was previously locked, + /// or if any dependencies were added and don't have a previously listed + /// version, we still want to avoid updating as many dependencies as + /// possible to keep the graph stable. In this case we map all of the + /// summary's dependencies to be rewritten to a locked version wherever + /// possible. If we're unable to map a dependency though, we just pass it on + /// through. + pub fn lock(&self, summary: Summary) -> Summary { + lock(&self.locked, &self.patches, summary) + } + + fn warn_bad_override(&self, + override_summary: &Summary, + real_summary: &Summary) -> CargoResult<()> { + let mut real_deps = real_summary.dependencies().iter().collect::>(); + + let boilerplate = "\ +This is currently allowed but is known to produce buggy behavior with spurious +recompiles and changes to the crate graph. Path overrides unfortunately were +never intended to support this feature, so for now this message is just a +warning. In the future, however, this message will become a hard error. + +To change the dependency graph via an override it's recommended to use the +`[replace]` feature of Cargo instead of the path override feature. This is +documented online at the url below for more information. 
+ +http://doc.crates.io/specifying-dependencies.html#overriding-dependencies +"; + + for dep in override_summary.dependencies() { + if let Some(i) = real_deps.iter().position(|d| dep == *d) { + real_deps.remove(i); + continue + } + let msg = format!("\ + path override for crate `{}` has altered the original list of\n\ + dependencies; the dependency on `{}` was either added or\n\ + modified to not match the previously resolved version\n\n\ + {}", override_summary.package_id().name(), dep.name(), boilerplate); + self.source_config.config().shell().warn(&msg)?; + return Ok(()) + } + + if let Some(id) = real_deps.get(0) { + let msg = format!("\ + path override for crate `{}` has altered the original list of + dependencies; the dependency on `{}` was removed\n\n + {}", override_summary.package_id().name(), id.name(), boilerplate); + self.source_config.config().shell().warn(&msg)?; + return Ok(()) + } + + Ok(()) + } +} + +impl<'cfg> Registry for PackageRegistry<'cfg> { + fn query(&mut self, + dep: &Dependency, + f: &mut FnMut(Summary)) -> CargoResult<()> { + let (override_summary, n, to_warn) = { + // Look for an override and get ready to query the real source. + let override_summary = self.query_overrides(dep)?; + + // Next up on our list of candidates is to check the `[patch]` + // section of the manifest. Here we look through all patches + // relevant to the source that `dep` points to, and then we match + // name/version. Note that we don't use `dep.matches(..)` because + // the patches, by definition, come from a different source. + // This means that `dep.matches(..)` will always return false, when + // what we really care about is the name/version match. + let mut patches = Vec::
<Summary>
::new(); + if let Some(extra) = self.patches.get(dep.source_id().url()) { + patches.extend(extra.iter().filter(|s| { + dep.matches_ignoring_source(s) + }).cloned()); + } + + // A crucial feature of the `[patch]` feature is that we *don't* + // query the actual registry if we have a "locked" dependency. A + // locked dep basically just means a version constraint of `=a.b.c`, + // and because patches take priority over the actual source then if + // we have a candidate we're done. + if patches.len() == 1 && dep.is_locked() { + let patch = patches.remove(0); + match override_summary { + Some(summary) => (summary, 1, Some(patch)), + None => { + f(patch); + return Ok(()) + } + } + } else { + if !patches.is_empty() { + debug!("found {} patches with an unlocked dep, \ + looking at sources", patches.len()); + } + + // Ensure the requested source_id is loaded + self.ensure_loaded(dep.source_id(), Kind::Normal).chain_err(|| { + format!("failed to load source for a dependency \ + on `{}`", dep.name()) + })?; + + let source = self.sources.get_mut(dep.source_id()); + match (override_summary, source) { + (Some(_), None) => bail!("override found but no real ones"), + (None, None) => return Ok(()), + + // If we don't have an override then we just ship + // everything upstairs after locking the summary + (None, Some(source)) => { + for patch in patches.iter() { + f(patch.clone()); + } + + // Our sources shouldn't ever come back to us with two + // summaries that have the same version. We could, + // however, have an `[patch]` section which is in use + // to override a version in the registry. This means + // that if our `summary` in this loop has the same + // version as something in `patches` that we've + // already selected, then we skip this `summary`. + let locked = &self.locked; + let all_patches = &self.patches; + return source.query(dep, &mut |summary| { + for patch in patches.iter() { + let patch = patch.package_id().version(); + if summary.package_id().version() == patch { + return + } + } + f(lock(locked, all_patches, summary)) + }) + } + + // If we have an override summary then we query the source + // to sanity check its results. We don't actually use any of + // the summaries it gives us though. 
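+ // (`n` below counts the candidates the real source returns;
+ // seeing more than one triggers the "non-locked list" error
+ // after this match.)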
+ (Some(override_summary), Some(source)) => { + if !patches.is_empty() { + bail!("found patches and a path override") + } + let mut n = 0; + let mut to_warn = None; + source.query(dep, &mut |summary| { + n += 1; + to_warn = Some(summary); + })?; + (override_summary, n, to_warn) + } + } + } + }; + + if n > 1 { + bail!("found an override with a non-locked list"); + } else if let Some(summary) = to_warn { + self.warn_bad_override(&override_summary, &summary)?; + } + f(self.lock(override_summary)); + Ok(()) + } + + fn supports_checksums(&self) -> bool { + false + } + + fn requires_precise(&self) -> bool { + false + } +} + +fn lock(locked: &LockedMap, + patches: &HashMap>, + summary: Summary) -> Summary { + let pair = locked.get(summary.source_id()).and_then(|map| { + map.get(summary.name()) + }).and_then(|vec| { + vec.iter().find(|&&(ref id, _)| id == summary.package_id()) + }); + + trace!("locking summary of {}", summary.package_id()); + + // Lock the summary's id if possible + let summary = match pair { + Some(&(ref precise, _)) => summary.override_id(precise.clone()), + None => summary, + }; + summary.map_dependencies(|dep| { + trace!("\t{}/{}/{}", dep.name(), dep.version_req(), + dep.source_id()); + + // If we've got a known set of overrides for this summary, then + // one of a few cases can arise: + // + // 1. We have a lock entry for this dependency from the same + // source as it's listed as coming from. In this case we make + // sure to lock to precisely the given package id. + // + // 2. We have a lock entry for this dependency, but it's from a + // different source than what's listed, or the version + // requirement has changed. In this case we must discard the + // locked version because the dependency needs to be + // re-resolved. + // + // 3. We don't have a lock entry for this dependency, in which + // case it was likely an optional dependency which wasn't + // included previously so we just pass it through anyway. + // + // Cases 1/2 are handled by `matches_id` and case 3 is handled by + // falling through to the logic below. + if let Some(&(_, ref locked_deps)) = pair { + let locked = locked_deps.iter().find(|id| dep.matches_id(id)); + if let Some(locked) = locked { + trace!("\tfirst hit on {}", locked); + let mut dep = dep.clone(); + dep.lock_to(locked); + return dep + } + } + + // If this dependency did not have a locked version, then we query + // all known locked packages to see if they match this dependency. + // If anything does then we lock it to that and move on. + let v = locked.get(dep.source_id()).and_then(|map| { + map.get(dep.name()) + }).and_then(|vec| { + vec.iter().find(|&&(ref id, _)| dep.matches_id(id)) + }); + if let Some(&(ref id, _)) = v { + trace!("\tsecond hit on {}", id); + let mut dep = dep.clone(); + dep.lock_to(id); + return dep + } + + // Finally we check to see if any registered patches correspond to + // this dependency. 
+ let v = patches.get(dep.source_id().url()).map(|vec| { + let dep2 = dep.clone(); + let mut iter = vec.iter().filter(move |s| { + dep2.name() == s.package_id().name() && + dep2.version_req().matches(s.package_id().version()) + }); + (iter.next(), iter) + }); + if let Some((Some(summary), mut remaining)) = v { + assert!(remaining.next().is_none()); + let patch_source = summary.package_id().source_id(); + let patch_locked = locked.get(patch_source).and_then(|m| { + m.get(summary.package_id().name()) + }).map(|list| { + list.iter().any(|&(ref id, _)| id == summary.package_id()) + }).unwrap_or(false); + + if patch_locked { + trace!("\tthird hit on {}", summary.package_id()); + let req = VersionReq::exact(summary.package_id().version()); + let mut dep = dep.clone(); + dep.set_version_req(req); + return dep + } + } + + trace!("\tnope, unlocked"); + dep + }) +} + +#[cfg(test)] +pub mod test { + use core::{Summary, Registry, Dependency}; + use util::CargoResult; + + pub struct RegistryBuilder { + summaries: Vec, + overrides: Vec + } + + impl RegistryBuilder { + pub fn new() -> RegistryBuilder { + RegistryBuilder { summaries: vec![], overrides: vec![] } + } + + pub fn summary(mut self, summary: Summary) -> RegistryBuilder { + self.summaries.push(summary); + self + } + + pub fn summaries(mut self, summaries: Vec) -> RegistryBuilder { + self.summaries.extend(summaries.into_iter()); + self + } + + pub fn add_override(mut self, summary: Summary) -> RegistryBuilder { + self.overrides.push(summary); + self + } + + pub fn overrides(mut self, summaries: Vec) -> RegistryBuilder { + self.overrides.extend(summaries.into_iter()); + self + } + + fn query_overrides(&self, dep: &Dependency) -> Vec { + self.overrides.iter() + .filter(|s| s.name() == dep.name()) + .map(|s| s.clone()) + .collect() + } + } + + impl Registry for RegistryBuilder { + fn query(&mut self, + dep: &Dependency, + f: &mut FnMut(Summary)) -> CargoResult<()> { + debug!("querying; dep={:?}", dep); + + let overrides = self.query_overrides(dep); + + if overrides.is_empty() { + for s in self.summaries.iter() { + if dep.matches(s) { + f(s.clone()); + } + } + Ok(()) + } else { + for s in overrides { + f(s); + } + Ok(()) + } + } + + fn supports_checksums(&self) -> bool { + false + } + + fn requires_precise(&self) -> bool { + false + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/resolver/encode.rs b/collector/compile-benchmarks/cargo/src/cargo/core/resolver/encode.rs new file mode 100644 index 000000000..a0cf0bac0 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/resolver/encode.rs @@ -0,0 +1,420 @@ +use std::collections::{HashMap, HashSet, BTreeMap}; +use std::fmt; +use std::str::FromStr; + +use serde::ser; +use serde::de; + +use core::{Package, PackageId, SourceId, Workspace, Dependency}; +use util::{Graph, Config, internal}; +use util::errors::{CargoResult, CargoResultExt, CargoError}; + +use super::Resolve; + +#[derive(Serialize, Deserialize, Debug)] +pub struct EncodableResolve { + package: Option>, + /// `root` is optional to allow backward compatibility. 
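+ /// (Old lock files stored the root package here instead of in `package`;
+ /// `into_resolve` below re-inserts it at the front of the package list.)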
+ root: Option, + metadata: Option, + + #[serde(default, skip_serializing_if = "Patch::is_empty")] + patch: Patch, +} + +#[derive(Serialize, Deserialize, Debug, Default)] +struct Patch { + unused: Vec, +} + +pub type Metadata = BTreeMap; + +impl EncodableResolve { + pub fn into_resolve(self, ws: &Workspace) -> CargoResult { + let path_deps = build_path_deps(ws); + + let packages = { + let mut packages = self.package.unwrap_or_default(); + if let Some(root) = self.root { + packages.insert(0, root); + } + packages + }; + + // `PackageId`s in the lock file don't include the `source` part + // for workspace members, so we reconstruct proper ids. + let (live_pkgs, all_pkgs) = { + let mut live_pkgs = HashMap::new(); + let mut all_pkgs = HashSet::new(); + for pkg in packages.iter() { + let enc_id = EncodablePackageId { + name: pkg.name.clone(), + version: pkg.version.clone(), + source: pkg.source.clone(), + }; + + if !all_pkgs.insert(enc_id.clone()) { + return Err(internal(format!("package `{}` is specified twice in the lockfile", + pkg.name))); + } + let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { + // We failed to find a local package in the workspace. + // It must have been removed and should be ignored. + None => { + debug!("path dependency now missing {} v{}", + pkg.name, + pkg.version); + continue + } + Some(source) => PackageId::new(&pkg.name, &pkg.version, source)? + }; + + assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none()) + } + (live_pkgs, all_pkgs) + }; + + let lookup_id = |enc_id: &EncodablePackageId| -> CargoResult> { + match live_pkgs.get(enc_id) { + Some(&(ref id, _)) => Ok(Some(id.clone())), + None => if all_pkgs.contains(enc_id) { + // Package is found in the lockfile, but it is + // no longer a member of the workspace. + Ok(None) + } else { + Err(internal(format!("package `{}` is specified as a dependency, \ + but is missing from the package list", enc_id))) + } + } + }; + + let g = { + let mut g = Graph::new(); + + for &(ref id, _) in live_pkgs.values() { + g.add(id.clone(), &[]); + } + + for &(ref id, pkg) in live_pkgs.values() { + let deps = match pkg.dependencies { + Some(ref deps) => deps, + None => continue + }; + + for edge in deps.iter() { + if let Some(to_depend_on) = lookup_id(edge)? { + g.link(id.clone(), to_depend_on); + } + } + } + g + }; + + let replacements = { + let mut replacements = HashMap::new(); + for &(ref id, pkg) in live_pkgs.values() { + if let Some(ref replace) = pkg.replace { + assert!(pkg.dependencies.is_none()); + if let Some(replace_id) = lookup_id(replace)? { + replacements.insert(id.clone(), replace_id); + } + } + } + replacements + }; + + let mut metadata = self.metadata.unwrap_or_default(); + + // Parse out all package checksums. After we do this we can be in a few + // situations: + // + // * We parsed no checksums. In this situation we're dealing with an old + // lock file and we're gonna fill them all in. + // * We parsed some checksums, but not one for all packages listed. It + // could have been the case that some were listed, then an older Cargo + // client added more dependencies, and now we're going to fill in the + // missing ones. + // * There are too many checksums listed, indicative of an older Cargo + // client removing a package but not updating the checksums listed. + // + // In all of these situations they're part of normal usage, so we don't + // really worry about it. We just try to slurp up as many checksums as + // possible. 
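+ //
+ // Keys have the form `checksum <name> <version> (<source-url>)`, with
+ // the digest as the value; for example (hypothetical entry):
+ //
+ //     checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)
+ //
+ // An empty value means the checksum was recorded as unknown.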
+ let mut checksums = HashMap::new(); + let prefix = "checksum "; + let mut to_remove = Vec::new(); + for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { + to_remove.push(k.to_string()); + let k = &k[prefix.len()..]; + let enc_id: EncodablePackageId = k.parse().chain_err(|| { + internal("invalid encoding of checksum in lockfile") + })?; + let id = match lookup_id(&enc_id) { + Ok(Some(id)) => id, + _ => continue, + }; + + let v = if v == "" { + None + } else { + Some(v.to_string()) + }; + checksums.insert(id, v); + } + + for k in to_remove { + metadata.remove(&k); + } + + let mut unused_patches = Vec::new(); + for pkg in self.patch.unused { + let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { + Some(src) => PackageId::new(&pkg.name, &pkg.version, src)?, + None => continue, + }; + unused_patches.push(id); + } + + Ok(Resolve { + graph: g, + empty_features: HashSet::new(), + features: HashMap::new(), + replacements: replacements, + checksums: checksums, + metadata: metadata, + unused_patches: unused_patches, + }) + } +} + +fn build_path_deps(ws: &Workspace) -> HashMap { + // If a crate is *not* a path source, then we're probably in a situation + // such as `cargo install` with a lock file from a remote dependency. In + // that case we don't need to fixup any path dependencies (as they're not + // actually path dependencies any more), so we ignore them. + let members = ws.members().filter(|p| { + p.package_id().source_id().is_path() + }).collect::>(); + + let mut ret = HashMap::new(); + let mut visited = HashSet::new(); + for member in members.iter() { + ret.insert(member.package_id().name().to_string(), + member.package_id().source_id().clone()); + visited.insert(member.package_id().source_id().clone()); + } + for member in members.iter() { + build_pkg(member, ws.config(), &mut ret, &mut visited); + } + for (_, deps) in ws.root_patch() { + for dep in deps { + build_dep(dep, ws.config(), &mut ret, &mut visited); + } + } + for &(_, ref dep) in ws.root_replace() { + build_dep(dep, ws.config(), &mut ret, &mut visited); + } + + return ret; + + fn build_pkg(pkg: &Package, + config: &Config, + ret: &mut HashMap, + visited: &mut HashSet) { + for dep in pkg.dependencies() { + build_dep(dep, config, ret, visited); + } + } + + fn build_dep(dep: &Dependency, + config: &Config, + ret: &mut HashMap, + visited: &mut HashSet) { + let id = dep.source_id(); + if visited.contains(id) || !id.is_path() { + return + } + let path = match id.url().to_file_path() { + Ok(p) => p.join("Cargo.toml"), + Err(_) => return, + }; + let pkg = match Package::for_path(&path, config) { + Ok(p) => p, + Err(_) => return, + }; + ret.insert(pkg.name().to_string(), + pkg.package_id().source_id().clone()); + visited.insert(pkg.package_id().source_id().clone()); + build_pkg(&pkg, config, ret, visited); + } +} + +impl Patch { + fn is_empty(&self) -> bool { + self.unused.is_empty() + } +} + +#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)] +pub struct EncodableDependency { + name: String, + version: String, + source: Option, + dependencies: Option>, + replace: Option, +} + +#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)] +pub struct EncodablePackageId { + name: String, + version: String, + source: Option +} + +impl fmt::Display for EncodablePackageId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {}", self.name, self.version)?; + if let Some(ref s) = self.source { + write!(f, " ({})", s.to_url())?; + } + Ok(()) + } +} + +impl FromStr 
for EncodablePackageId { + type Err = CargoError; + + fn from_str(s: &str) -> CargoResult { + let mut s = s.splitn(3, ' '); + let name = s.next().unwrap(); + let version = s.next().ok_or_else(|| { + internal("invalid serialized PackageId") + })?; + let source_id = match s.next() { + Some(s) => { + if s.starts_with('(') && s.ends_with(')') { + Some(SourceId::from_url(&s[1..s.len() - 1])?) + } else { + bail!("invalid serialized PackageId") + } + } + None => None, + }; + + Ok(EncodablePackageId { + name: name.to_string(), + version: version.to_string(), + source: source_id + }) + } +} + +impl ser::Serialize for EncodablePackageId { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer, + { + s.collect_str(self) + } +} + +impl<'de> de::Deserialize<'de> for EncodablePackageId { + fn deserialize(d: D) -> Result + where D: de::Deserializer<'de>, + { + String::deserialize(d).and_then(|string| { + string.parse::() + .map_err(de::Error::custom) + }) + } +} + +pub struct WorkspaceResolve<'a, 'cfg: 'a> { + pub ws: &'a Workspace<'cfg>, + pub resolve: &'a Resolve, +} + +impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer, + { + let mut ids: Vec<&PackageId> = self.resolve.graph.iter().collect(); + ids.sort(); + + let encodable = ids.iter().filter_map(|&id| { + Some(encodable_resolve_node(id, self.resolve)) + }).collect::>(); + + let mut metadata = self.resolve.metadata.clone(); + + for id in ids.iter().filter(|id| !id.source_id().is_path()) { + let checksum = match self.resolve.checksums[*id] { + Some(ref s) => &s[..], + None => "", + }; + let id = encodable_package_id(id); + metadata.insert(format!("checksum {}", id.to_string()), + checksum.to_string()); + } + + let metadata = if metadata.is_empty() { None } else { Some(metadata) }; + + let patch = Patch { + unused: self.resolve.unused_patches().iter().map(|id| { + EncodableDependency { + name: id.name().to_string(), + version: id.version().to_string(), + source: encode_source(id.source_id()), + dependencies: None, + replace: None, + } + }).collect(), + }; + EncodableResolve { + package: Some(encodable), + root: None, + metadata: metadata, + patch: patch, + }.serialize(s) + } +} + +fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) + -> EncodableDependency { + let (replace, deps) = match resolve.replacement(id) { + Some(id) => { + (Some(encodable_package_id(id)), None) + } + None => { + let mut deps = resolve.graph.edges(id) + .into_iter().flat_map(|a| a) + .map(encodable_package_id) + .collect::>(); + deps.sort(); + (None, Some(deps)) + } + }; + + EncodableDependency { + name: id.name().to_string(), + version: id.version().to_string(), + source: encode_source(id.source_id()), + dependencies: deps, + replace: replace, + } +} + +fn encodable_package_id(id: &PackageId) -> EncodablePackageId { + EncodablePackageId { + name: id.name().to_string(), + version: id.version().to_string(), + source: encode_source(id.source_id()).map(|s| s.with_precise(None)), + } +} + +fn encode_source(id: &SourceId) -> Option { + if id.is_path() { + None + } else { + Some(id.clone()) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/resolver/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/core/resolver/mod.rs new file mode 100644 index 000000000..4c938f87e --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/resolver/mod.rs @@ -0,0 +1,1265 @@ +//! Resolution of the entire dependency graph for a crate +//! +//! 
This module implements the core logic in taking the world of crates and +//! constraints and creating a resolved graph with locked versions for all +//! crates and their dependencies. This is separate from the registry module +//! which is more worried about discovering crates from various sources, this +//! module just uses the Registry trait as a source to learn about crates from. +//! +//! Actually solving a constraint graph is an NP-hard problem. This algorithm +//! is basically a nice heuristic to make sure we get roughly the best answer +//! most of the time. The constraints that we're working with are: +//! +//! 1. Each crate can have any number of dependencies. Each dependency can +//! declare a version range that it is compatible with. +//! 2. Crates can be activated with multiple version (e.g. show up in the +//! dependency graph twice) so long as each pairwise instance have +//! semver-incompatible versions. +//! +//! The algorithm employed here is fairly simple, we simply do a DFS, activating +//! the "newest crate" (highest version) first and then going to the next +//! option. The heuristics we employ are: +//! +//! * Never try to activate a crate version which is incompatible. This means we +//! only try crates which will actually satisfy a dependency and we won't ever +//! try to activate a crate that's semver compatible with something else +//! activated (as we're only allowed to have one). +//! * Always try to activate the highest version crate first. The default +//! dependency in Cargo (e.g. when you write `foo = "0.1.2"`) is +//! semver-compatible, so selecting the highest version possible will allow us +//! to hopefully satisfy as many dependencies at once. +//! +//! Beyond that, what's implemented below is just a naive backtracking version +//! which should in theory try all possible combinations of dependencies and +//! versions to see if one works. The first resolution that works causes +//! everything to bail out immediately and return success, and only if *nothing* +//! works do we actually return an error up the stack. +//! +//! ## Performance +//! +//! Note that this is a relatively performance-critical portion of Cargo. The +//! data that we're processing is proportional to the size of the dependency +//! graph, which can often be quite large (e.g. take a look at Servo). To make +//! matters worse the DFS algorithm we're implemented is inherently quite +//! inefficient. When we add the requirement of backtracking on top it means +//! that we're implementing something that probably shouldn't be allocating all +//! over the place. + +use std::cmp::Ordering; +use std::collections::{HashSet, HashMap, BinaryHeap, BTreeMap}; +use std::iter::FromIterator; +use std::fmt; +use std::ops::Range; +use std::rc::Rc; + +use semver; +use url::Url; + +use core::{PackageId, Registry, SourceId, Summary, Dependency}; +use core::PackageIdSpec; +use util::config::Config; +use util::Graph; +use util::errors::{CargoResult, CargoError}; +use util::profile; +use util::graph::{Nodes, Edges}; + +pub use self::encode::{EncodableResolve, EncodableDependency, EncodablePackageId}; +pub use self::encode::{Metadata, WorkspaceResolve}; + +mod encode; + +/// Represents a fully resolved package dependency graph. Each node in the graph +/// is a package and edges represent dependencies between packages. +/// +/// Each instance of `Resolve` also understands the full set of features used +/// for each package. 
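+///
+/// A sketch of typical read-only queries (assumes a populated `Resolve`
+/// named `resolve`; the spec string is hypothetical):
+///
+/// ```ignore
+/// let id = resolve.query("foo:1.2.3")?;
+/// for dep in resolve.deps(id) {
+///     println!("{} depends on {}", id, dep);
+/// }
+/// ```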
+#[derive(PartialEq)] +pub struct Resolve { + graph: Graph, + replacements: HashMap, + empty_features: HashSet, + features: HashMap>, + checksums: HashMap>, + metadata: Metadata, + unused_patches: Vec, +} + +pub struct Deps<'a> { + edges: Option>, + resolve: &'a Resolve, +} + +pub struct DepsNotReplaced<'a> { + edges: Option>, +} + +#[derive(Clone, Copy)] +pub enum Method<'a> { + Everything, + Required { + dev_deps: bool, + features: &'a [String], + uses_default_features: bool, + }, +} + +// Information about the dependencies for a crate, a tuple of: +// +// (dependency info, candidates, features activated) +type DepInfo = (Dependency, Rc>, Rc>); + +#[derive(Clone)] +struct Candidate { + summary: Summary, + replace: Option, +} + +impl Resolve { + /// Resolves one of the paths from the given dependent package up to + /// the root. + pub fn path_to_top(&self, pkg: &PackageId) -> Vec<&PackageId> { + let mut result = Vec::new(); + let mut pkg = pkg; + while let Some(pulling) = self.graph + .get_nodes() + .iter() + .filter_map(|(pulling, pulled)| + if pulled.contains(pkg) { + Some(pulling) + } else { + None + }) + .nth(0) { + result.push(pulling); + pkg = pulling; + } + result + } + pub fn register_used_patches(&mut self, + patches: &HashMap>) { + for summary in patches.values().flat_map(|v| v) { + if self.iter().any(|id| id == summary.package_id()) { + continue + } + self.unused_patches.push(summary.package_id().clone()); + } + } + + pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> { + // Given a previous instance of resolve, it should be forbidden to ever + // have a checksums which *differ*. If the same package id has differing + // checksums, then something has gone wrong such as: + // + // * Something got seriously corrupted + // * A "mirror" isn't actually a mirror as some changes were made + // * A replacement source wasn't actually a replacment, some changes + // were made + // + // In all of these cases, we want to report an error to indicate that + // something is awry. Normal execution (esp just using crates.io) should + // never run into this. + for (id, cksum) in previous.checksums.iter() { + if let Some(mine) = self.checksums.get(id) { + if mine == cksum { + continue + } + + // If the previous checksum wasn't calculated, the current + // checksum is `Some`. This may indicate that a source was + // erroneously replaced or was replaced with something that + // desires stronger checksum guarantees than can be afforded + // elsewhere. + if cksum.is_none() { + bail!("\ +checksum for `{}` was not previously calculated, but a checksum could now \ +be calculated + +this could be indicative of a few possible situations: + + * the source `{}` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt +", id, id.source_id()) + + // If our checksum hasn't been calculated, then it could mean + // that future Cargo figured out how to checksum something or + // more realistically we were overridden with a source that does + // not have checksums. 
+ } else if mine.is_none() { + bail!("\ +checksum for `{}` could not be calculated, but a checksum is listed in \ +the existing lock file + +this could be indicative of a few possible situations: + + * the source `{}` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `{0}` is the same as when the lockfile was generated +", id, id.source_id()) + + // If the checksums aren't equal, and neither is None, then they + // must both be Some, in which case the checksum now differs. + // That's quite bad! + } else { + bail!("\ +checksum for `{}` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `{0}` is the same as when the lockfile was generated +", id); + } + } + } + + // Be sure to just copy over any unknown metadata. + self.metadata = previous.metadata.clone(); + Ok(()) + } + + pub fn iter(&self) -> Nodes { + self.graph.iter() + } + + pub fn deps(&self, pkg: &PackageId) -> Deps { + Deps { edges: self.graph.edges(pkg), resolve: self } + } + + pub fn deps_not_replaced(&self, pkg: &PackageId) -> DepsNotReplaced { + DepsNotReplaced { edges: self.graph.edges(pkg) } + } + + pub fn replacement(&self, pkg: &PackageId) -> Option<&PackageId> { + self.replacements.get(pkg) + } + + pub fn replacements(&self) -> &HashMap { + &self.replacements + } + + pub fn features(&self, pkg: &PackageId) -> &HashSet { + self.features.get(pkg).unwrap_or(&self.empty_features) + } + + pub fn features_sorted(&self, pkg: &PackageId) -> Vec<&str> { + let mut v = Vec::from_iter(self.features(pkg).iter().map(|s| s.as_ref())); + v.sort(); + v + } + + pub fn query(&self, spec: &str) -> CargoResult<&PackageId> { + PackageIdSpec::query_str(spec, self.iter()) + } + + pub fn unused_patches(&self) -> &[PackageId] { + &self.unused_patches + } +} + +impl fmt::Debug for Resolve { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "graph: {:?}\n", self.graph)?; + write!(fmt, "\nfeatures: {{\n")?; + for (pkg, features) in &self.features { + write!(fmt, " {}: {:?}\n", pkg, features)?; + } + write!(fmt, "}}") + } +} + +impl<'a> Iterator for Deps<'a> { + type Item = &'a PackageId; + + fn next(&mut self) -> Option<&'a PackageId> { + self.edges.as_mut() + .and_then(|e| e.next()) + .map(|id| self.resolve.replacement(id).unwrap_or(id)) + } +} + +impl<'a> Iterator for DepsNotReplaced<'a> { + type Item = &'a PackageId; + + fn next(&mut self) -> Option<&'a PackageId> { + self.edges.as_mut().and_then(|e| e.next()) + } +} + +struct RcList { + head: Option)>> +} + +impl RcList { + fn new() -> RcList { + RcList { head: None } + } + + fn push(&mut self, data: T) { + let node = Rc::new((data, RcList { head: self.head.take() })); + self.head = Some(node); + } +} + +// Not derived to avoid `T: Clone` +impl Clone for RcList { + fn clone(&self) -> RcList { + RcList { head: self.head.clone() } + } +} + +// Avoid stack overflows on drop by turning recursion into a loop +impl Drop for RcList { + fn drop(&mut self) { + let mut cur = self.head.take(); + while let Some(head) = cur { + match Rc::try_unwrap(head) { + Ok((_data, mut next)) => cur = next.head.take(), + Err(_) => break, + } + } + } +} + +enum GraphNode { + Add(PackageId), + Link(PackageId, PackageId), +} + +// A `Context` is basically a bunch of local resolution information which is +// kept around 
for all `BacktrackFrame` instances. As a result, this runs the +// risk of being cloned *a lot* so we want to make this as cheap to clone as +// possible. +#[derive(Clone)] +struct Context<'a> { + // TODO: Both this and the map below are super expensive to clone. We should + // switch to persistent hash maps if we can at some point or otherwise + // make these much cheaper to clone in general. + activations: Activations, + resolve_features: HashMap>, + + // These are two cheaply-cloneable lists (O(1) clone) which are effectively + // hash maps but are built up as "construction lists". We'll iterate these + // at the very end and actually construct the map that we're making. + resolve_graph: RcList, + resolve_replacements: RcList<(PackageId, PackageId)>, + + replacements: &'a [(PackageIdSpec, Dependency)], + + // These warnings are printed after resolution. + warnings: RcList, +} + +type Activations = HashMap>>; + +/// Builds the list of all packages required to build the first argument. +pub fn resolve(summaries: &[(Summary, Method)], + replacements: &[(PackageIdSpec, Dependency)], + registry: &mut Registry, + config: Option<&Config>) -> CargoResult { + let cx = Context { + resolve_graph: RcList::new(), + resolve_features: HashMap::new(), + resolve_replacements: RcList::new(), + activations: HashMap::new(), + replacements: replacements, + warnings: RcList::new(), + }; + let _p = profile::start("resolving"); + let cx = activate_deps_loop(cx, registry, summaries)?; + + let mut resolve = Resolve { + graph: cx.graph(), + empty_features: HashSet::new(), + checksums: HashMap::new(), + metadata: BTreeMap::new(), + replacements: cx.resolve_replacements(), + features: cx.resolve_features.iter().map(|(k, v)| { + (k.clone(), v.clone()) + }).collect(), + unused_patches: Vec::new(), + }; + + for summary in cx.activations.values() + .flat_map(|v| v.values()) + .flat_map(|v| v.iter()) { + let cksum = summary.checksum().map(|s| s.to_string()); + resolve.checksums.insert(summary.package_id().clone(), cksum); + } + + check_cycles(&resolve, &cx.activations)?; + trace!("resolved: {:?}", resolve); + + // If we have a shell, emit warnings about required deps used as feature. + if let Some(config) = config { + let mut shell = config.shell(); + let mut warnings = &cx.warnings; + while let Some(ref head) = warnings.head { + shell.warn(&head.0)?; + warnings = &head.1; + } + } + + Ok(resolve) +} + +/// Attempts to activate the summary `candidate` in the context `cx`. +/// +/// This function will pull dependency summaries from the registry provided, and +/// the dependencies of the package will be determined by the `method` provided. +/// If `candidate` was activated, this function returns the dependency frame to +/// iterate through next. 
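+///
+/// (A return of `Ok(None)` means the candidate, and its replacement if one
+/// exists, had already been activated, so there are no new dependencies to
+/// walk.)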
+
+/// Attempts to activate the summary `candidate` in the context `cx`.
+///
+/// This function will pull dependency summaries from the registry provided, and
+/// the dependencies of the package will be determined by the `method` provided.
+/// If `candidate` was activated, this function returns the dependency frame to
+/// iterate through next.
+fn activate(cx: &mut Context,
+            registry: &mut Registry,
+            parent: Option<&Summary>,
+            candidate: Candidate,
+            method: &Method)
+            -> CargoResult<Option<DepsFrame>> {
+    if let Some(parent) = parent {
+        cx.resolve_graph.push(GraphNode::Link(parent.package_id().clone(),
+                                              candidate.summary.package_id().clone()));
+    }
+
+    let activated = cx.flag_activated(&candidate.summary, method);
+
+    let candidate = match candidate.replace {
+        Some(replace) => {
+            cx.resolve_replacements.push((candidate.summary.package_id().clone(),
+                                          replace.package_id().clone()));
+            if cx.flag_activated(&replace, method) && activated {
+                return Ok(None);
+            }
+            trace!("activating {} (replacing {})", replace.package_id(),
+                   candidate.summary.package_id());
+            replace
+        }
+        None => {
+            if activated {
+                return Ok(None)
+            }
+            trace!("activating {}", candidate.summary.package_id());
+            candidate.summary
+        }
+    };
+
+    let deps = cx.build_deps(registry, &candidate, method)?;
+
+    Ok(Some(DepsFrame {
+        parent: candidate,
+        remaining_siblings: RcVecIter::new(Rc::new(deps)),
+    }))
+}
+
+struct RcVecIter<T> {
+    vec: Rc<Vec<T>>,
+    rest: Range<usize>,
+}
+
+impl<T> RcVecIter<T> {
+    fn new(vec: Rc<Vec<T>>) -> RcVecIter<T> {
+        RcVecIter {
+            rest: 0..vec.len(),
+            vec: vec,
+        }
+    }
+
+    fn cur_index(&self) -> usize {
+        self.rest.start - 1
+    }
+}
+
+// Not derived to avoid `T: Clone`
+impl<T> Clone for RcVecIter<T> {
+    fn clone(&self) -> RcVecIter<T> {
+        RcVecIter {
+            vec: self.vec.clone(),
+            rest: self.rest.clone(),
+        }
+    }
+}
+
+impl<T> Iterator for RcVecIter<T> where T: Clone {
+    type Item = (usize, T);
+
+    fn next(&mut self) -> Option<(usize, T)> {
+        self.rest.next().and_then(|i| {
+            self.vec.get(i).map(|val| (i, val.clone()))
+        })
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.rest.size_hint()
+    }
+}
+
+#[derive(Clone)]
+struct DepsFrame {
+    parent: Summary,
+    remaining_siblings: RcVecIter<DepInfo>,
+}
+
+impl DepsFrame {
+    /// Returns the least number of candidates that any of this frame's siblings
+    /// has.
+    ///
+    /// The `remaining_siblings` array is already sorted with the smallest
+    /// number of candidates at the front, so we just return the number of
+    /// candidates in that entry.
+    fn min_candidates(&self) -> usize {
+        self.remaining_siblings.clone().next().map(|(_, (_, candidates, _))| {
+            candidates.len()
+        }).unwrap_or(0)
+    }
+}
+
+impl PartialEq for DepsFrame {
+    fn eq(&self, other: &DepsFrame) -> bool {
+        self.min_candidates() == other.min_candidates()
+    }
+}
+
+impl Eq for DepsFrame {}
+
+impl PartialOrd for DepsFrame {
+    fn partial_cmp(&self, other: &DepsFrame) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for DepsFrame {
+    fn cmp(&self, other: &DepsFrame) -> Ordering {
+        // the frame with the sibling that has the least number of candidates
+        // needs to get bubbled up to the top of the heap we use below, so
+        // reverse the order of the comparison here.
+        other.min_candidates().cmp(&self.min_candidates())
+    }
+}
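+
+// Editor's note: illustration only, not vendored code. `DepsFrame` reverses
+// its `Ord` so that Rust's max-heap `BinaryHeap` pops the *most* constrained
+// frame (the one with the fewest candidates) first. The same trick is shown
+// here with a stand-in type.
+#[cfg(test)]
+mod min_heap_demo {
+    use std::cmp::Ordering;
+    use std::collections::BinaryHeap;
+
+    #[derive(PartialEq, Eq)]
+    struct Frame { candidates: usize }
+
+    impl PartialOrd for Frame {
+        fn partial_cmp(&self, other: &Frame) -> Option<Ordering> {
+            Some(self.cmp(other))
+        }
+    }
+
+    impl Ord for Frame {
+        fn cmp(&self, other: &Frame) -> Ordering {
+            // reversed, as in `DepsFrame::cmp`
+            other.candidates.cmp(&self.candidates)
+        }
+    }
+
+    #[test]
+    fn most_constrained_pops_first() {
+        let mut heap = BinaryHeap::new();
+        heap.push(Frame { candidates: 10 });
+        heap.push(Frame { candidates: 1 });
+        heap.push(Frame { candidates: 4 });
+        assert_eq!(heap.pop().unwrap().candidates, 1);
+    }
+}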
+
+struct BacktrackFrame<'a> {
+    context_backup: Context<'a>,
+    deps_backup: BinaryHeap<DepsFrame>,
+    remaining_candidates: RemainingCandidates,
+    parent: Summary,
+    dep: Dependency,
+    features: Rc<Vec<String>>,
+}
+
+#[derive(Clone)]
+struct RemainingCandidates {
+    remaining: RcVecIter<Candidate>,
+}
+
+impl RemainingCandidates {
+    fn next(&mut self, prev_active: &[Summary]) -> Option<Candidate> {
+        // Filter the set of candidates based on the previously activated
+        // versions for this dependency. We can actually use a version if it
+        // precisely matches an activated version or if it is otherwise
+        // incompatible with all other activated versions. Note that we
+        // define "compatible" here in terms of the semver sense where if
+        // the left-most nonzero digit is the same they're considered
+        // compatible.
+        self.remaining.by_ref().map(|p| p.1).find(|b| {
+            prev_active.iter().any(|a| *a == b.summary) ||
+                prev_active.iter().all(|a| {
+                    !compatible(a.version(), b.summary.version())
+                })
+        })
+    }
+}
+
+/// Recursively activates the dependencies for `top`, in depth-first order,
+/// backtracking across possible candidates for each dependency as necessary.
+///
+/// If all dependencies can be activated and resolved to a version in the
+/// dependency graph, cx.resolve is returned.
+fn activate_deps_loop<'a>(mut cx: Context<'a>,
+                          registry: &mut Registry,
+                          summaries: &[(Summary, Method)])
+                          -> CargoResult<Context<'a>> {
+    // Note that a `BinaryHeap` is used for the remaining dependencies that need
+    // activation. This heap is sorted such that the "largest value" is the most
+    // constrained dependency, or the one with the least candidates.
+    //
+    // This helps us get through super constrained portions of the dependency
+    // graph quickly and hopefully lock down what later larger dependencies can
+    // use (those with more candidates).
+    let mut backtrack_stack = Vec::new();
+    let mut remaining_deps = BinaryHeap::new();
+    for &(ref summary, ref method) in summaries {
+        debug!("initial activation: {}", summary.package_id());
+        let candidate = Candidate { summary: summary.clone(), replace: None };
+        remaining_deps.extend(activate(&mut cx, registry, None, candidate,
+                                       method)?);
+    }
+
+    // Main resolution loop, this is the workhorse of the resolution algorithm.
+    //
+    // You'll note that a few stacks are maintained on the side, which might
+    // seem odd when this algorithm looks like it could be implemented
+    // recursively. While correct, this is implemented iteratively to avoid
+    // blowing the stack (the recursion depth is proportional to the size of the
+    // input).
+    //
+    // The general sketch of this loop is to run until there are no dependencies
+    // left to activate, and for each dependency to attempt to activate all of
+    // its own dependencies in turn. The `backtrack_stack` is a side table of
+    // backtracking states where if we hit an error we can return to in order to
+    // attempt to continue resolving.
+    while let Some(mut deps_frame) = remaining_deps.pop() {
+        let frame = match deps_frame.remaining_siblings.next() {
+            Some(sibling) => {
+                let parent = Summary::clone(&deps_frame.parent);
+                remaining_deps.push(deps_frame);
+                (parent, sibling)
+            }
+            None => continue,
+        };
+        let (mut parent, (mut cur, (mut dep, candidates, mut features))) = frame;
+        assert!(!remaining_deps.is_empty());
+
+        let (next, has_another, remaining_candidates) = {
+            let prev_active = cx.prev_active(&dep);
+            trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(),
+                   candidates.len());
+            trace!("{}[{}]>{} {} prev activations", parent.name(), cur,
+                   dep.name(), prev_active.len());
+            let mut candidates = RemainingCandidates {
+                remaining: RcVecIter::new(Rc::clone(&candidates)),
+            };
+            (candidates.next(prev_active),
+             candidates.clone().next(prev_active).is_some(),
+             candidates)
+        };
+
+        // Alright, for each candidate that's gotten this far, it meets the
+        // following requirements:
+        //
+        // 1. The version matches the dependency requirement listed for this
+        //    package
+        // 2. There are no activated versions for this package which are
+        //    semver-compatible, or there's an activated version which is
+        //    precisely equal to `candidate`.
+        //
+        // This means that we're going to attempt to activate each candidate in
+        // turn. We could possibly fail to activate each candidate, so we try
+        // each one in turn.
+        let candidate = match next {
+            Some(candidate) => {
+                // We have a candidate. Add an entry to the `backtrack_stack` so
+                // we can try the next one if this one fails.
+                if has_another {
+                    backtrack_stack.push(BacktrackFrame {
+                        context_backup: Context::clone(&cx),
+                        deps_backup: <BinaryHeap<DepsFrame>>::clone(&remaining_deps),
+                        remaining_candidates: remaining_candidates,
+                        parent: Summary::clone(&parent),
+                        dep: Dependency::clone(&dep),
+                        features: Rc::clone(&features),
+                    });
+                }
+                candidate
+            }
+            None => {
+                // This dependency has no valid candidate. Backtrack until we
+                // find a dependency that does have a candidate to try, and try
+                // to activate that one. This resets the `remaining_deps` to
+                // their state at the found level of the `backtrack_stack`.
+                trace!("{}[{}]>{} -- no candidates", parent.name(), cur,
+                       dep.name());
+                match find_candidate(&mut backtrack_stack,
+                                     &mut cx,
+                                     &mut remaining_deps,
+                                     &mut parent,
+                                     &mut cur,
+                                     &mut dep,
+                                     &mut features) {
+                    None => return Err(activation_error(&cx, registry, &parent,
+                                                        &dep,
+                                                        cx.prev_active(&dep),
+                                                        &candidates)),
+                    Some(candidate) => candidate,
+                }
+            }
+        };
+
+        let method = Method::Required {
+            dev_deps: false,
+            features: &features,
+            uses_default_features: dep.uses_default_features(),
+        };
+        trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
+               candidate.summary.version());
+        remaining_deps.extend(activate(&mut cx, registry, Some(&parent),
+                                       candidate, &method)?);
+    }
+
+    Ok(cx)
+}
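+
+// Editor's note: a toy version (not vendored code) of the snapshot-style
+// backtracking used by `activate_deps_loop` above: clone the state into a
+// frame before trying a candidate, and restore it from the frame when a dead
+// end is hit.
+#[cfg(test)]
+mod backtrack_demo {
+    #[test]
+    fn restore_on_dead_end() {
+        let mut picked: Vec<u32> = Vec::new();
+        let mut stack: Vec<(Vec<u32>, u32)> = Vec::new();
+
+        // Try 1 first, remembering a snapshot and that 2 is still available.
+        stack.push((picked.clone(), 2));
+        picked.push(1);
+
+        // Suppose 1 turns out to conflict: restore the snapshot and try 2.
+        let (backup, alt) = stack.pop().unwrap();
+        picked = backup;
+        picked.push(alt);
+        assert_eq!(picked, [2]);
+    }
+}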
+
+// Searches up `backtrack_stack` until it finds a dependency with remaining
+// candidates. Resets `cx` and `remaining_deps` to that level and returns the
+// next candidate. If all candidates have been exhausted, returns None.
+fn find_candidate<'a>(backtrack_stack: &mut Vec<BacktrackFrame<'a>>,
+                      cx: &mut Context<'a>,
+                      remaining_deps: &mut BinaryHeap<DepsFrame>,
+                      parent: &mut Summary,
+                      cur: &mut usize,
+                      dep: &mut Dependency,
+                      features: &mut Rc<Vec<String>>) -> Option<Candidate> {
+    while let Some(mut frame) = backtrack_stack.pop() {
+        let (next, has_another) = {
+            let prev_active = frame.context_backup.prev_active(&frame.dep);
+            (frame.remaining_candidates.next(prev_active),
+             frame.remaining_candidates.clone().next(prev_active).is_some())
+        };
+        if let Some(candidate) = next {
+            if has_another {
+                *cx = frame.context_backup.clone();
+                *remaining_deps = frame.deps_backup.clone();
+                *parent = frame.parent.clone();
+                *dep = frame.dep.clone();
+                *features = Rc::clone(&frame.features);
+                backtrack_stack.push(frame);
+            } else {
+                *cx = frame.context_backup;
+                *remaining_deps = frame.deps_backup;
+                *parent = frame.parent;
+                *dep = frame.dep;
+                *features = frame.features;
+            }
+            *cur = remaining_deps.peek().unwrap().remaining_siblings.cur_index();
+            return Some(candidate)
+        }
+    }
+    None
+}
+
+fn activation_error(cx: &Context,
+                    registry: &mut Registry,
+                    parent: &Summary,
+                    dep: &Dependency,
+                    prev_active: &[Summary],
+                    candidates: &[Candidate]) -> CargoError {
+    if !candidates.is_empty() {
+        let mut msg = format!("failed to select a version for `{}` \
+                               (required by `{}`):\n\
+                               all possible versions conflict with \
+                               previously selected versions of `{}`",
+                              dep.name(), parent.name(),
+                              dep.name());
+        let graph = cx.graph();
+        'outer: for v in prev_active.iter() {
+            for node in graph.iter() {
+                let edges = match graph.edges(node) {
+                    Some(edges) => edges,
+                    None => continue,
+                };
+                for edge in edges {
+                    if edge != v.package_id() { continue }
+
+                    msg.push_str(&format!("\n  version {} in use by {}",
+                                          v.version(), edge));
+                    continue 'outer;
+                }
+            }
+            msg.push_str(&format!("\n  version {} in use by ??",
+                                  v.version()));
+        }
+
+        msg.push_str(&format!("\n  possible versions to select: {}",
+                              candidates.iter()
+                                        .map(|v| v.summary.version())
+                                        .map(|v| v.to_string())
+                                        .collect::<Vec<_>>()
+                                        .join(", ")));
+
+        return msg.into()
+    }
+
+    // Once we're all the way down here, we're definitely lost in the
+    // weeds! We didn't actually use any candidates above, so we need to
+    // give an error message that nothing was found.
+    //
+    // Note that we re-query the registry with a new dependency that
+    // allows any version so we can give some nicer error reporting
+    // which indicates a few versions that were actually found.
+    let all_req = semver::VersionReq::parse("*").unwrap();
+    let mut new_dep = dep.clone();
+    new_dep.set_version_req(all_req);
+    let mut candidates = match registry.query_vec(&new_dep) {
+        Ok(candidates) => candidates,
+        Err(e) => return e,
+    };
+    candidates.sort_by(|a, b| {
+        b.version().cmp(a.version())
+    });
+
+    let msg = if !candidates.is_empty() {
+        let versions = {
+            let mut versions = candidates.iter().take(3).map(|cand| {
+                cand.version().to_string()
+            }).collect::<Vec<_>>();
+
+            if candidates.len() > 3 {
+                versions.push("...".into());
+            }
+
+            versions.join(", ")
+        };
+
+        let mut msg = format!("no matching version `{}` found for package `{}` \
+                               (required by `{}`)\n\
+                               location searched: {}\n\
+                               versions found: {}",
+                              dep.version_req(),
+                              dep.name(),
+                              parent.name(),
+                              dep.source_id(),
+                              versions);
+
+        // If we have a path dependency with a locked version, then this may
+        // indicate that we updated a sub-package and forgot to run `cargo
+        // update`. In this case try to print a helpful error!
+        if dep.source_id().is_path()
+           && dep.version_req().to_string().starts_with('=') {
+            msg.push_str("\nconsider running `cargo update` to update \
+                          a path dependency's locked version");
+        }
+
+        msg
+    } else {
+        format!("no matching package named `{}` found \
+                 (required by `{}`)\n\
+                 location searched: {}\n\
+                 version required: {}",
+                dep.name(), parent.name(),
+                dep.source_id(),
+                dep.version_req())
+    };
+
+    msg.into()
+}
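+
+// Editor's note: illustration only, not vendored code. The "versions found"
+// hint built by `activation_error` above caps the list at three versions and
+// appends `...`, like so:
+#[cfg(test)]
+mod version_list_demo {
+    #[test]
+    fn caps_at_three() {
+        let candidates = vec!["1.2.0", "1.1.0", "1.0.0", "0.9.0"];
+        let mut versions = candidates.iter().take(3).map(|v| v.to_string())
+                                     .collect::<Vec<_>>();
+        if candidates.len() > 3 {
+            versions.push("...".into());
+        }
+        assert_eq!(versions.join(", "), "1.2.0, 1.1.0, 1.0.0, ...");
+    }
+}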
+
+// Returns if `a` and `b` are compatible in the semver sense. This is a
+// commutative operation.
+//
+// Versions `a` and `b` are compatible if their left-most nonzero digit is the
+// same.
+fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
+    if a.major != b.major { return false }
+    if a.major != 0 { return true }
+    if a.minor != b.minor { return false }
+    if a.minor != 0 { return true }
+    a.patch == b.patch
+}
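+
+// Editor's note: not vendored code; this spells out what `compatible` above
+// accepts. Only the left-most nonzero component has to match.
+#[cfg(test)]
+mod compatible_demo {
+    use semver::Version;
+    use super::compatible;
+
+    fn v(s: &str) -> Version { Version::parse(s).unwrap() }
+
+    #[test]
+    fn leftmost_nonzero_rule() {
+        assert!(compatible(&v("1.2.3"), &v("1.9.0")));  // same major
+        assert!(!compatible(&v("1.2.3"), &v("2.0.0"))); // major differs
+        assert!(!compatible(&v("0.2.3"), &v("0.3.0"))); // 0.x: minor differs
+        assert!(compatible(&v("0.2.3"), &v("0.2.9")));  // 0.2.x is one family
+        assert!(!compatible(&v("0.0.1"), &v("0.0.2"))); // 0.0.x: exact patch
+    }
+}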
+
+// Returns a pair of (feature dependencies, all used features)
+//
+// The feature dependencies map is a mapping of package name to list of features
+// enabled. Each package should be enabled, and each package should have the
+// specified set of features enabled. The boolean indicates whether this
+// package was specifically requested (rather than just requesting features
+// *within* this package).
+//
+// The all used features set is the set of features which this local package had
+// enabled, which is later used when compiling to instruct the code what
+// features were enabled.
+fn build_features<'a>(s: &'a Summary, method: &'a Method)
+                      -> CargoResult<(HashMap<&'a str, (bool, Vec<String>)>,
+                                      HashSet<&'a str>)> {
+    let mut deps = HashMap::new();
+    let mut used = HashSet::new();
+    let mut visited = HashSet::new();
+    match *method {
+        Method::Everything => {
+            for key in s.features().keys() {
+                add_feature(s, key, &mut deps, &mut used, &mut visited)?;
+            }
+            for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
+                add_feature(s, dep.name(), &mut deps, &mut used,
+                            &mut visited)?;
+            }
+        }
+        Method::Required { features: requested_features, .. } => {
+            for feat in requested_features.iter() {
+                add_feature(s, feat, &mut deps, &mut used, &mut visited)?;
+            }
+        }
+    }
+    match *method {
+        Method::Everything |
+        Method::Required { uses_default_features: true, .. } => {
+            if s.features().get("default").is_some() {
+                add_feature(s, "default", &mut deps, &mut used,
+                            &mut visited)?;
+            }
+        }
+        Method::Required { uses_default_features: false, .. } => {}
+    }
+    return Ok((deps, used));
+
+    fn add_feature<'a>(s: &'a Summary,
+                       feat: &'a str,
+                       deps: &mut HashMap<&'a str, (bool, Vec<String>)>,
+                       used: &mut HashSet<&'a str>,
+                       visited: &mut HashSet<&'a str>) -> CargoResult<()> {
+        if feat.is_empty() { return Ok(()) }
+
+        // If this feature is of the form `foo/bar`, then we just lookup package
+        // `foo` and enable its feature `bar`. Otherwise this feature is of the
+        // form `foo` and we need to recurse to enable the feature `foo` for our
+        // own package, which may end up enabling more features or just enabling
+        // a dependency.
+        let mut parts = feat.splitn(2, '/');
+        let feat_or_package = parts.next().unwrap();
+        match parts.next() {
+            Some(feat) => {
+                let package = feat_or_package;
+                used.insert(package);
+                deps.entry(package)
+                    .or_insert((false, Vec::new()))
+                    .1.push(feat.to_string());
+            }
+            None => {
+                let feat = feat_or_package;
+
+                // if this feature has already been added, then just return Ok
+                if !visited.insert(feat) {
+                    return Ok(());
+                }
+
+                used.insert(feat);
+                match s.features().get(feat) {
+                    Some(recursive) => {
+                        // This is a feature, add it recursively.
+                        for f in recursive {
+                            if f == feat {
+                                bail!("Cyclic feature dependency: feature `{}` depends \
+                                       on itself", feat);
+                            }
+
+                            add_feature(s, f, deps, used, visited)?;
+                        }
+                    }
+                    None => {
+                        // This is a dependency, mark it as explicitly requested.
+                        deps.entry(feat).or_insert((false, Vec::new())).0 = true;
+                    }
+                }
+            }
+        }
+        Ok(())
+    }
+}
+
+impl<'a> Context<'a> {
+    // Activate this summary by inserting it into our list of known activations.
+    //
+    // Returns if this summary with the given method is already activated.
+    fn flag_activated(&mut self,
+                      summary: &Summary,
+                      method: &Method) -> bool {
+        let id = summary.package_id();
+        let prev = self.activations
+                       .entry(id.name().to_string())
+                       .or_insert_with(HashMap::new)
+                       .entry(id.source_id().clone())
+                       .or_insert(Vec::new());
+        if !prev.iter().any(|c| c == summary) {
+            self.resolve_graph.push(GraphNode::Add(id.clone()));
+            prev.push(summary.clone());
+            return false
+        }
+        debug!("checking if {} is already activated", summary.package_id());
+        let (features, use_default) = match *method {
+            Method::Required { features, uses_default_features, .. } => {
+                (features, uses_default_features)
+            }
+            Method::Everything => return false,
+        };
+
+        let has_default_feature = summary.features().contains_key("default");
+        match self.resolve_features.get(id) {
+            Some(prev) => {
+                features.iter().all(|f| prev.contains(f)) &&
+                    (!use_default || prev.contains("default") ||
+                     !has_default_feature)
+            }
+            None => features.is_empty() && (!use_default || !has_default_feature)
+        }
+    }
+
+    fn build_deps(&mut self,
+                  registry: &mut Registry,
+                  candidate: &Summary,
+                  method: &Method) -> CargoResult<Vec<DepInfo>> {
+        // First, figure out our set of dependencies based on the requested set
+        // of features. This also calculates what features we're going to enable
+        // for our own dependencies.
+        let deps = self.resolve_features(candidate, method)?;
+
+        // Next, transform all dependencies into a list of possible candidates
+        // which can satisfy that dependency.
+        let mut deps = deps.into_iter().map(|(dep, features)| {
+            let mut candidates = self.query(registry, &dep)?;
+            // When we attempt versions for a package, we'll want to start at
+            // the maximum version and work our way down.
+            candidates.sort_by(|a, b| {
+                b.summary.version().cmp(a.summary.version())
+            });
+            Ok((dep, Rc::new(candidates), Rc::new(features)))
+        }).collect::<CargoResult<Vec<DepInfo>>>()?;
+
+        // Attempt to resolve dependencies with fewer candidates before trying
+        // dependencies with more candidates. This way if the dependency with
+        // only one candidate can't be resolved we don't have to do a bunch of
+        // work before we figure that out.
+        deps.sort_by_key(|&(_, ref a, _)| a.len());
+
+        Ok(deps)
+    }
+
+    /// Queries the `registry` to return a list of candidates for `dep`.
+    ///
+    /// This method is the location where overrides are taken into account.
If + /// any candidates are returned which match an override then the override is + /// applied by performing a second query for what the override should + /// return. + fn query(&self, + registry: &mut Registry, + dep: &Dependency) -> CargoResult> { + let mut ret = Vec::new(); + registry.query(dep, &mut |s| { + ret.push(Candidate { summary: s, replace: None }); + })?; + for candidate in ret.iter_mut() { + let summary = &candidate.summary; + + let mut potential_matches = self.replacements.iter() + .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); + + let &(ref spec, ref dep) = match potential_matches.next() { + None => continue, + Some(replacement) => replacement, + }; + debug!("found an override for {} {}", dep.name(), dep.version_req()); + + let mut summaries = registry.query_vec(dep)?.into_iter(); + let s = summaries.next().ok_or_else(|| { + format!("no matching package for override `{}` found\n\ + location searched: {}\n\ + version required: {}", + spec, dep.source_id(), dep.version_req()) + })?; + let summaries = summaries.collect::>(); + if !summaries.is_empty() { + let bullets = summaries.iter().map(|s| { + format!(" * {}", s.package_id()) + }).collect::>(); + bail!("the replacement specification `{}` matched \ + multiple packages:\n * {}\n{}", spec, s.package_id(), + bullets.join("\n")); + } + + // The dependency should be hard-coded to have the same name and an + // exact version requirement, so both of these assertions should + // never fail. + assert_eq!(s.version(), summary.version()); + assert_eq!(s.name(), summary.name()); + + let replace = if s.source_id() == summary.source_id() { + debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); + None + } else { + Some(s) + }; + let matched_spec = spec.clone(); + + // Make sure no duplicates + if let Some(&(ref spec, _)) = potential_matches.next() { + bail!("overlapping replacement specifications found:\n\n \ + * {}\n * {}\n\nboth specifications match: {}", + matched_spec, spec, summary.package_id()); + } + + for dep in summary.dependencies() { + debug!("\t{} => {}", dep.name(), dep.version_req()); + } + + candidate.replace = replace; + } + Ok(ret) + } + + fn prev_active(&self, dep: &Dependency) -> &[Summary] { + self.activations.get(dep.name()) + .and_then(|v| v.get(dep.source_id())) + .map(|v| &v[..]) + .unwrap_or(&[]) + } + + /// Return all dependencies and the features we want from them. + fn resolve_features<'b>(&mut self, + s: &'b Summary, + method: &'b Method) + -> CargoResult)>> { + let dev_deps = match *method { + Method::Everything => true, + Method::Required { dev_deps, .. } => dev_deps, + }; + + // First, filter by dev-dependencies + let deps = s.dependencies(); + let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); + + let (mut feature_deps, used_features) = build_features(s, method)?; + let mut ret = Vec::new(); + + // Next, collect all actually enabled dependencies and their features. + for dep in deps { + // Skip optional dependencies, but not those enabled through a feature + if dep.is_optional() && !feature_deps.contains_key(dep.name()) { + continue + } + // So we want this dependency. Move the features we want from `feature_deps` + // to `ret`. + let base = feature_deps.remove(dep.name()).unwrap_or((false, vec![])); + if !dep.is_optional() && base.0 { + self.warnings.push( + format!("Package `{}` does not have feature `{}`. It has a required dependency \ + with that name, but only optional dependencies can be used as features. 
\ + This is currently a warning to ease the transition, but it will become an \ + error in the future.", + s.package_id(), dep.name()) + ); + } + let mut base = base.1; + base.extend(dep.features().iter().cloned()); + for feature in base.iter() { + if feature.contains("/") { + bail!("feature names may not contain slashes: `{}`", feature); + } + } + ret.push((dep.clone(), base)); + } + + // Any remaining entries in feature_deps are bugs in that the package does not actually + // have those dependencies. We classified them as dependencies in the first place + // because there is no such feature, either. + if !feature_deps.is_empty() { + let unknown = feature_deps.keys().map(|s| &s[..]) + .collect::>(); + let features = unknown.join(", "); + bail!("Package `{}` does not have these features: `{}`", + s.package_id(), features) + } + + // Record what list of features is active for this package. + if !used_features.is_empty() { + let pkgid = s.package_id(); + + let set = self.resolve_features.entry(pkgid.clone()) + .or_insert_with(HashSet::new); + for feature in used_features { + if !set.contains(feature) { + set.insert(feature.to_string()); + } + } + } + + Ok(ret) + } + + fn resolve_replacements(&self) -> HashMap { + let mut replacements = HashMap::new(); + let mut cur = &self.resolve_replacements; + while let Some(ref node) = cur.head { + let (k, v) = node.0.clone(); + replacements.insert(k, v); + cur = &node.1; + } + replacements + } + + fn graph(&self) -> Graph { + let mut graph = Graph::new(); + let mut cur = &self.resolve_graph; + while let Some(ref node) = cur.head { + match node.0 { + GraphNode::Add(ref p) => graph.add(p.clone(), &[]), + GraphNode::Link(ref a, ref b) => graph.link(a.clone(), b.clone()), + } + cur = &node.1; + } + graph + } +} + +fn check_cycles(resolve: &Resolve, activations: &Activations) + -> CargoResult<()> { + let summaries: HashMap<&PackageId, &Summary> = activations.values() + .flat_map(|v| v.values()) + .flat_map(|v| v) + .map(|s| (s.package_id(), s)) + .collect(); + + // Sort packages to produce user friendly deterministic errors. + let all_packages = resolve.iter().collect::>().into_sorted_vec(); + let mut checked = HashSet::new(); + for pkg in all_packages { + if !checked.contains(pkg) { + visit(resolve, + pkg, + &summaries, + &mut HashSet::new(), + &mut checked)? + } + } + return Ok(()); + + fn visit<'a>(resolve: &'a Resolve, + id: &'a PackageId, + summaries: &HashMap<&'a PackageId, &Summary>, + visited: &mut HashSet<&'a PackageId>, + checked: &mut HashSet<&'a PackageId>) + -> CargoResult<()> { + // See if we visited ourselves + if !visited.insert(id) { + bail!("cyclic package dependency: package `{}` depends on itself", + id); + } + + // If we've already checked this node no need to recurse again as we'll + // just conclude the same thing as last time, so we only execute the + // recursive step if we successfully insert into `checked`. + // + // Note that if we hit an intransitive dependency then we clear out the + // visitation list as we can't induce a cycle through transitive + // dependencies. 
+ if checked.insert(id) { + let summary = summaries[id]; + for dep in resolve.deps_not_replaced(id) { + let is_transitive = summary.dependencies().iter().any(|d| { + d.matches_id(dep) && d.is_transitive() + }); + let mut empty = HashSet::new(); + let visited = if is_transitive {&mut *visited} else {&mut empty}; + visit(resolve, dep, summaries, visited, checked)?; + + if let Some(id) = resolve.replacement(dep) { + visit(resolve, id, summaries, visited, checked)?; + } + } + } + + // Ok, we're done, no longer visiting our node any more + visited.remove(id); + Ok(()) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/shell.rs b/collector/compile-benchmarks/cargo/src/cargo/core/shell.rs new file mode 100644 index 000000000..6911339d8 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/shell.rs @@ -0,0 +1,251 @@ +use std::fmt; +use std::io::prelude::*; + +use atty; +use termcolor::Color::{Green, Red, Yellow}; +use termcolor::{self, StandardStream, Color, ColorSpec, WriteColor}; + +use util::errors::CargoResult; + +/// The requested verbosity of output +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum Verbosity { + Verbose, + Normal, + Quiet +} + +/// An abstraction around a `Write`able object that remembers preferences for output verbosity and +/// color. +pub struct Shell { + /// the `Write`able object, either with or without color support (represented by different enum + /// variants) + err: ShellOut, + /// How verbose messages should be + verbosity: Verbosity, +} + +impl fmt::Debug for Shell { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match &self.err { + &ShellOut::Write(_) => f.debug_struct("Shell") + .field("verbosity", &self.verbosity) + .finish(), + &ShellOut::Stream(_, color_choice) => f.debug_struct("Shell") + .field("verbosity", &self.verbosity) + .field("color_choice", &color_choice) + .finish() + } + } +} + +/// A `Write`able object, either with or without color support +enum ShellOut { + /// A plain write object without color support + Write(Box), + /// Color-enabled stdio, with information on whether color should be used + Stream(StandardStream, ColorChoice), +} + +/// Whether messages should use color output +#[derive(Debug, PartialEq, Clone, Copy)] +pub enum ColorChoice { + /// Force color output + Always, + /// Force disable color output + Never, + /// Intelligently guess whether to use color output + CargoAuto, +} + +impl Shell { + /// Create a new shell (color choice and verbosity), defaulting to 'auto' color and verbose + /// output. + pub fn new() -> Shell { + Shell { + err: ShellOut::Stream( + StandardStream::stderr(ColorChoice::CargoAuto.to_termcolor_color_choice()), + ColorChoice::CargoAuto, + ), + verbosity: Verbosity::Verbose, + } + } + + /// Create a shell from a plain writable object, with no color, and max verbosity. + pub fn from_write(out: Box) -> Shell { + Shell { + err: ShellOut::Write(out), + verbosity: Verbosity::Verbose, + } + } + + /// Print a message, where the status will have `color` color, and can be justified. The + /// messages follows without color. + fn print(&mut self, + status: &fmt::Display, + message: &fmt::Display, + color: Color, + justified: bool) -> CargoResult<()> { + match self.verbosity { + Verbosity::Quiet => Ok(()), + _ => { + self.err.print(status, message, color, justified) + } + } + } + + /// Get a reference to the underlying writer + pub fn err(&mut self) -> &mut Write { + self.err.as_write() + } + + /// Shortcut to right-align and color green a status message. 
+ pub fn status(&mut self, status: T, message: U) -> CargoResult<()> + where T: fmt::Display, U: fmt::Display + { + self.print(&status, &message, Green, true) + } + + /// Shortcut to right-align a status message. + pub fn status_with_color(&mut self, + status: T, + message: U, + color: Color) -> CargoResult<()> + where T: fmt::Display, U: fmt::Display + { + self.print(&status, &message, color, true) + } + + /// Run the callback only if we are in verbose mode + pub fn verbose(&mut self, mut callback: F) -> CargoResult<()> + where F: FnMut(&mut Shell) -> CargoResult<()> + { + match self.verbosity { + Verbosity::Verbose => callback(self), + _ => Ok(()) + } + } + + /// Run the callback if we are not in verbose mode. + pub fn concise(&mut self, mut callback: F) -> CargoResult<()> + where F: FnMut(&mut Shell) -> CargoResult<()> + { + match self.verbosity { + Verbosity::Verbose => Ok(()), + _ => callback(self) + } + } + + /// Print a red 'error' message + pub fn error(&mut self, message: T) -> CargoResult<()> { + self.print(&"error:", &message, Red, false) + } + + /// Print an amber 'warning' message + pub fn warn(&mut self, message: T) -> CargoResult<()> { + match self.verbosity { + Verbosity::Quiet => Ok(()), + _ => self.print(&"warning:", &message, Yellow, false), + } + } + + /// Update the verbosity of the shell + pub fn set_verbosity(&mut self, verbosity: Verbosity) { + self.verbosity = verbosity; + } + + /// Get the verbosity of the shell + pub fn verbosity(&self) -> Verbosity { + self.verbosity + } + + /// Update the color choice (always, never, or auto) from a string. + pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> { + if let ShellOut::Stream(ref mut err, ref mut cc) = self.err { + let cfg = match color { + Some("always") => ColorChoice::Always, + Some("never") => ColorChoice::Never, + + Some("auto") | + None => ColorChoice::CargoAuto, + + Some(arg) => bail!("argument for --color must be auto, always, or \ + never, but found `{}`", arg), + }; + *cc = cfg; + *err = StandardStream::stderr(cfg.to_termcolor_color_choice()); + } + Ok(()) + } + + /// Get the current color choice + /// + /// If we are not using a color stream, this will always return Never, even if the color choice + /// has been set to something else. + pub fn color_choice(&self) -> ColorChoice { + match self.err { + ShellOut::Stream(_, cc) => cc, + ShellOut::Write(_) => ColorChoice::Never, + } + } +} + +impl ShellOut { + /// Print out a message with a status. The status comes first and is bold + the given color. + /// The status can be justified, in which case the max width that will right align is 12 chars. + fn print(&mut self, + status: &fmt::Display, + message: &fmt::Display, + color: Color, + justified: bool) -> CargoResult<()> { + match *self { + ShellOut::Stream(ref mut err, _) => { + err.reset()?; + err.set_color(ColorSpec::new() + .set_bold(true) + .set_fg(Some(color)))?; + if justified { + write!(err, "{:>12}", status)?; + } else { + write!(err, "{}", status)?; + } + err.reset()?; + write!(err, " {}\n", message)?; + } + ShellOut::Write(ref mut w) => { + if justified { + write!(w, "{:>12}", status)?; + } else { + write!(w, "{}", status)?; + } + write!(w, " {}\n", message)?; + } + } + Ok(()) + } + + /// Get this object as a `io::Write`. 
+    fn as_write(&mut self) -> &mut Write {
+        match *self {
+            ShellOut::Stream(ref mut err, _) => err,
+            ShellOut::Write(ref mut w) => w,
+        }
+    }
+}
+
+impl ColorChoice {
+    /// Convert our color choice to termcolor's version
+    fn to_termcolor_color_choice(&self) -> termcolor::ColorChoice {
+        match *self {
+            ColorChoice::Always => termcolor::ColorChoice::Always,
+            ColorChoice::Never => termcolor::ColorChoice::Never,
+            ColorChoice::CargoAuto => {
+                if atty::is(atty::Stream::Stderr) {
+                    termcolor::ColorChoice::Auto
+                } else {
+                    termcolor::ColorChoice::Never
+                }
+            }
+        }
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/source.rs b/collector/compile-benchmarks/cargo/src/cargo/core/source.rs
new file mode 100644
index 000000000..01d659919
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/core/source.rs
@@ -0,0 +1,652 @@
+use std::cmp::{self, Ordering};
+use std::collections::hash_map::{HashMap, Values, IterMut};
+use std::fmt::{self, Formatter};
+use std::hash::{self, Hash};
+use std::path::Path;
+use std::sync::Arc;
+use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT};
+use std::sync::atomic::Ordering::SeqCst;
+
+use serde::ser;
+use serde::de;
+use url::Url;
+
+use core::{Package, PackageId, Registry};
+use ops;
+use sources::git;
+use sources::{PathSource, GitSource, RegistrySource, CRATES_IO};
+use sources::DirectorySource;
+use util::{Config, CargoResult, ToUrl};
+
+/// A Source finds and downloads remote packages based on names and
+/// versions.
+pub trait Source: Registry {
+    /// Returns the `SourceId` corresponding to this source
+    fn source_id(&self) -> &SourceId;
+
+    /// The update method performs any network operations required to
+    /// get the entire list of all names, versions and dependencies of
+    /// packages managed by the Source.
+    fn update(&mut self) -> CargoResult<()>;
+
+    /// The download method fetches the full package for each name and
+    /// version specified.
+    fn download(&mut self, package: &PackageId) -> CargoResult<Package>;
+
+    /// Generates a unique string which represents the fingerprint of the
+    /// current state of the source.
+    ///
+    /// This fingerprint is used to determine the "freshness" of the source
+    /// later on. It must be guaranteed that the fingerprint of a source is
+    /// constant if and only if the output product will remain constant.
+    ///
+    /// The `pkg` argument is the package which this fingerprint should only be
+    /// interested in for when this source may contain multiple packages.
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String>;
+
+    /// If this source supports it, verifies the source of the package
+    /// specified.
+    ///
+    /// Note that the source may also have performed other checksum-based
+    /// verification during the `download` step, but this is intended to be run
+    /// just before a crate is compiled so it may perform more expensive checks
+    /// which may not be cacheable.
+ fn verify(&self, _pkg: &PackageId) -> CargoResult<()> { + Ok(()) + } +} + +impl<'a, T: Source + ?Sized + 'a> Source for Box { + /// Forwards to `Source::source_id` + fn source_id(&self) -> &SourceId { + (**self).source_id() + } + + /// Forwards to `Source::update` + fn update(&mut self) -> CargoResult<()> { + (**self).update() + } + + /// Forwards to `Source::download` + fn download(&mut self, id: &PackageId) -> CargoResult { + (**self).download(id) + } + + /// Forwards to `Source::fingerprint` + fn fingerprint(&self, pkg: &Package) -> CargoResult { + (**self).fingerprint(pkg) + } + + /// Forwards to `Source::verify` + fn verify(&self, pkg: &PackageId) -> CargoResult<()> { + (**self).verify(pkg) + } +} + +/// The possible kinds of code source. Along with a URL, this fully defines the +/// source +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +enum Kind { + /// Kind::Git() represents a git repository + Git(GitReference), + /// represents a local path + Path, + /// represents the central registry + Registry, + /// represents a local filesystem-based registry + LocalRegistry, + /// represents a directory-based registry + Directory, +} + +/// Information to find a specific commit in a git repository +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum GitReference { + /// from a tag + Tag(String), + /// from the HEAD of a branch + Branch(String), + /// from a specific revision + Rev(String), +} + +/// Unique identifier for a source of packages. +#[derive(Clone, Eq, Debug)] +pub struct SourceId { + inner: Arc, +} + +/// Unique identifier for a source of packages. +#[derive(Eq, Clone, Debug)] +struct SourceIdInner { + /// The source URL + url: Url, + /// `git::canonicalize_url(url)` for the url field + canonical_url: Url, + /// The source kind + kind: Kind, + // e.g. the exact git revision of the specified branch for a Git Source + precise: Option, +} + +impl SourceId { + /// Create a SourceId object from the kind and url. + /// + /// The canonical url will be calculated, but the precise field will not + fn new(kind: Kind, url: Url) -> CargoResult { + let source_id = SourceId { + inner: Arc::new(SourceIdInner { + kind: kind, + canonical_url: git::canonicalize_url(&url)?, + url: url, + precise: None, + }), + }; + Ok(source_id) + } + + /// Parses a source URL and returns the corresponding ID. + /// + /// ## Example + /// + /// ``` + /// use cargo::core::SourceId; + /// SourceId::from_url("git+https://github.com/alexcrichton/\ + /// libssh2-static-sys#80e71a3021618eb05\ + /// 656c58fb7c5ef5f12bc747f"); + /// ``` + pub fn from_url(string: &str) -> CargoResult { + let mut parts = string.splitn(2, '+'); + let kind = parts.next().unwrap(); + let url = parts.next().ok_or_else(|| format!("invalid source `{}`", string))?; + + match kind { + "git" => { + let mut url = url.to_url()?; + let mut reference = GitReference::Branch("master".to_string()); + for (k, v) in url.query_pairs() { + match &k[..] { + // map older 'ref' to branch + "branch" | + "ref" => reference = GitReference::Branch(v.into_owned()), + + "rev" => reference = GitReference::Rev(v.into_owned()), + "tag" => reference = GitReference::Tag(v.into_owned()), + _ => {} + } + } + let precise = url.fragment().map(|s| s.to_owned()); + url.set_fragment(None); + url.set_query(None); + Ok(SourceId::for_git(&url, reference)?.with_precise(precise)) + }, + "registry" => { + let url = url.to_url()?; + Ok(SourceId::new(Kind::Registry, url)? 
+ .with_precise(Some("locked".to_string()))) + } + "path" => { + let url = url.to_url()?; + SourceId::new(Kind::Path, url) + } + kind => Err(format!("unsupported source protocol: {}", kind).into()) + } + } + + /// A view of the `SourceId` that can be `Display`ed as a URL + pub fn to_url(&self) -> SourceIdToUrl { + SourceIdToUrl { inner: &*self.inner } + } + + /// Create a SourceId from a filesystem path. + /// + /// Pass absolute path + pub fn for_path(path: &Path) -> CargoResult { + let url = path.to_url()?; + SourceId::new(Kind::Path, url) + } + + /// Crate a SourceId from a git reference + pub fn for_git(url: &Url, reference: GitReference) -> CargoResult { + SourceId::new(Kind::Git(reference), url.clone()) + } + + /// Create a SourceId from a registry url + pub fn for_registry(url: &Url) -> CargoResult { + SourceId::new(Kind::Registry, url.clone()) + } + + /// Create a SourceId from a local registry path + pub fn for_local_registry(path: &Path) -> CargoResult { + let url = path.to_url()?; + SourceId::new(Kind::LocalRegistry, url) + } + + /// Create a SourceId from a directory path + pub fn for_directory(path: &Path) -> CargoResult { + let url = path.to_url()?; + SourceId::new(Kind::Directory, url) + } + + /// Returns the `SourceId` corresponding to the main repository. + /// + /// This is the main cargo registry by default, but it can be overridden in + /// a `.cargo/config`. + pub fn crates_io(config: &Config) -> CargoResult { + let cfg = ops::registry_configuration(config)?; + let url = if let Some(ref index) = cfg.index { + static WARNED: AtomicBool = ATOMIC_BOOL_INIT; + if !WARNED.swap(true, SeqCst) { + config.shell().warn("custom registry support via \ + the `registry.index` configuration is \ + being removed, this functionality \ + will not work in the future")?; + } + &index[..] + } else { + CRATES_IO + }; + let url = url.to_url()?; + SourceId::for_registry(&url) + } + + /// Get this source URL + pub fn url(&self) -> &Url { + &self.inner.url + } + + /// Is this source from a filesystem path + pub fn is_path(&self) -> bool { + self.inner.kind == Kind::Path + } + + /// Is this source from a registry (either local or not) + pub fn is_registry(&self) -> bool { + self.inner.kind == Kind::Registry || self.inner.kind == Kind::LocalRegistry + } + + /// Is this source from a git repository + pub fn is_git(&self) -> bool { + match self.inner.kind { + Kind::Git(_) => true, + _ => false, + } + } + + /// Creates an implementation of `Source` corresponding to this ID. + pub fn load<'a>(&self, config: &'a Config) -> CargoResult> { + trace!("loading SourceId; {}", self); + match self.inner.kind { + Kind::Git(..) 
=> Ok(Box::new(GitSource::new(self, config)?)), + Kind::Path => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Ok(Box::new(PathSource::new(&path, self, config))) + } + Kind::Registry => Ok(Box::new(RegistrySource::remote(self, config))), + Kind::LocalRegistry => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Ok(Box::new(RegistrySource::local(self, &path, config))) + } + Kind::Directory => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Ok(Box::new(DirectorySource::new(&path, self, config))) + } + } + } + + /// Get the value of the precise field + pub fn precise(&self) -> Option<&str> { + self.inner.precise.as_ref().map(|s| &s[..]) + } + + /// Get the git reference if this is a git source, otherwise None. + pub fn git_reference(&self) -> Option<&GitReference> { + match self.inner.kind { + Kind::Git(ref s) => Some(s), + _ => None, + } + } + + /// Create a new SourceId from this source with the given `precise` + pub fn with_precise(&self, v: Option) -> SourceId { + SourceId { + inner: Arc::new(SourceIdInner { + precise: v, + ..(*self.inner).clone() + }) + } + } + + /// Whether the remote registry is the standard https://crates.io + pub fn is_default_registry(&self) -> bool { + match self.inner.kind { + Kind::Registry => {} + _ => return false, + } + self.inner.url.to_string() == CRATES_IO + } + + /// Hash `self` + /// + /// For paths, remove the workspace prefix so the same source will give the + /// same hash in different locations. + pub fn stable_hash(&self, workspace: &Path, into: &mut S) { + if self.is_path() { + if let Ok(p) = self.inner.url.to_file_path().unwrap().strip_prefix(workspace) { + self.inner.kind.hash(into); + p.to_str().unwrap().hash(into); + return + } + } + self.hash(into) + } +} + +impl PartialEq for SourceId { + fn eq(&self, other: &SourceId) -> bool { + (*self.inner).eq(&*other.inner) + } +} + +impl PartialOrd for SourceId { + fn partial_cmp(&self, other: &SourceId) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for SourceId { + fn cmp(&self, other: &SourceId) -> Ordering { + self.inner.cmp(&other.inner) + } +} + +impl ser::Serialize for SourceId { + fn serialize(&self, s: S) -> Result + where S: ser::Serializer, + { + if self.is_path() { + None::.serialize(s) + } else { + s.collect_str(&self.to_url()) + } + } +} + +impl<'de> de::Deserialize<'de> for SourceId { + fn deserialize(d: D) -> Result + where D: de::Deserializer<'de>, + { + let string = String::deserialize(d)?; + SourceId::from_url(&string).map_err(de::Error::custom) + } +} + +impl fmt::Display for SourceId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match *self.inner { + SourceIdInner { kind: Kind::Path, ref url, .. } => { + fmt::Display::fmt(url, f) + } + SourceIdInner { kind: Kind::Git(ref reference), ref url, + ref precise, .. } => { + write!(f, "{}", url)?; + if let Some(pretty) = reference.pretty_ref() { + write!(f, "?{}", pretty)?; + } + + if let Some(ref s) = *precise { + let len = cmp::min(s.len(), 8); + write!(f, "#{}", &s[..len])?; + } + Ok(()) + } + SourceIdInner { kind: Kind::Registry, ref url, .. } | + SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => { + write!(f, "registry {}", url) + } + SourceIdInner { kind: Kind::Directory, ref url, .. 
} => { + write!(f, "dir {}", url) + } + } + } +} + +// This custom implementation handles situations such as when two git sources +// point at *almost* the same URL, but not quite, even when they actually point +// to the same repository. +/// This method tests for self and other values to be equal, and is used by ==. +/// +/// For git repositories, the canonical url is checked. +impl PartialEq for SourceIdInner { + fn eq(&self, other: &SourceIdInner) -> bool { + if self.kind != other.kind { + return false; + } + if self.url == other.url { + return true; + } + + match (&self.kind, &other.kind) { + (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => { + ref1 == ref2 && self.canonical_url == other.canonical_url + } + _ => false, + } + } +} + +impl PartialOrd for SourceIdInner { + fn partial_cmp(&self, other: &SourceIdInner) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for SourceIdInner { + fn cmp(&self, other: &SourceIdInner) -> Ordering { + match self.kind.cmp(&other.kind) { + Ordering::Equal => {} + ord => return ord, + } + match self.url.cmp(&other.url) { + Ordering::Equal => {} + ord => return ord, + } + match (&self.kind, &other.kind) { + (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => { + (ref1, &self.canonical_url).cmp(&(ref2, &other.canonical_url)) + } + _ => self.kind.cmp(&other.kind), + } + } +} + +// The hash of SourceId is used in the name of some Cargo folders, so shouldn't +// vary. `as_str` gives the serialisation of a url (which has a spec) and so +// insulates against possible changes in how the url crate does hashing. +impl Hash for SourceId { + fn hash(&self, into: &mut S) { + self.inner.kind.hash(into); + match *self.inner { + SourceIdInner { kind: Kind::Git(..), ref canonical_url, .. } => { + canonical_url.as_str().hash(into) + } + _ => self.inner.url.as_str().hash(into), + } + } +} + +/// A `Display`able view into a SourceId that will write it as a url +pub struct SourceIdToUrl<'a> { + inner: &'a SourceIdInner, +} + +impl<'a> fmt::Display for SourceIdToUrl<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self.inner { + SourceIdInner { kind: Kind::Path, ref url, .. } => { + write!(f, "path+{}", url) + } + SourceIdInner { + kind: Kind::Git(ref reference), ref url, ref precise, .. + } => { + write!(f, "git+{}", url)?; + if let Some(pretty) = reference.pretty_ref() { + write!(f, "?{}", pretty)?; + } + if let Some(precise) = precise.as_ref() { + write!(f, "#{}", precise)?; + } + Ok(()) + } + SourceIdInner { kind: Kind::Registry, ref url, .. } => { + write!(f, "registry+{}", url) + } + SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => { + write!(f, "local-registry+{}", url) + } + SourceIdInner { kind: Kind::Directory, ref url, .. 
} => { + write!(f, "directory+{}", url) + } + } + } +} + +impl GitReference { + /// Returns a `Display`able view of this git reference, or None if using + /// the head of the "master" branch + pub fn pretty_ref(&self) -> Option { + match *self { + GitReference::Branch(ref s) if *s == "master" => None, + _ => Some(PrettyRef { inner: self }), + } + } +} + +/// A git reference that can be `Display`ed +pub struct PrettyRef<'a> { + inner: &'a GitReference, +} + +impl<'a> fmt::Display for PrettyRef<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self.inner { + GitReference::Branch(ref b) => write!(f, "branch={}", b), + GitReference::Tag(ref s) => write!(f, "tag={}", s), + GitReference::Rev(ref s) => write!(f, "rev={}", s), + } + } +} + +/// A `HashMap` of `SourceId` -> `Box` +#[derive(Default)] +pub struct SourceMap<'src> { + map: HashMap>, +} + +/// A `std::collection::hash_map::Values` for `SourceMap` +pub type Sources<'a, 'src> = Values<'a, SourceId, Box>; + +/// A `std::collection::hash_map::IterMut` for `SourceMap` +pub struct SourcesMut<'a, 'src: 'a> { + inner: IterMut<'a, SourceId, Box>, +} + +impl<'src> SourceMap<'src> { + /// Create an empty map + pub fn new() -> SourceMap<'src> { + SourceMap { map: HashMap::new() } + } + + /// Like `HashMap::contains_key` + pub fn contains(&self, id: &SourceId) -> bool { + self.map.contains_key(id) + } + + /// Like `HashMap::get` + pub fn get(&self, id: &SourceId) -> Option<&(Source + 'src)> { + let source = self.map.get(id); + + source.map(|s| { + let s: &(Source + 'src) = &**s; + s + }) + } + + /// Like `HashMap::get_mut` + pub fn get_mut(&mut self, id: &SourceId) -> Option<&mut (Source + 'src)> { + self.map.get_mut(id).map(|s| { + let s: &mut (Source + 'src) = &mut **s; + s + }) + } + + /// Like `HashMap::get`, but first calculates the `SourceId` from a + /// `PackageId` + pub fn get_by_package_id(&self, pkg_id: &PackageId) -> Option<&(Source + 'src)> { + self.get(pkg_id.source_id()) + } + + /// Like `HashMap::insert`, but derives the SourceId key from the Source + pub fn insert(&mut self, source: Box) { + let id = source.source_id().clone(); + self.map.insert(id, source); + } + + /// Like `HashMap::is_empty` + pub fn is_empty(&self) -> bool { + self.map.is_empty() + } + + /// Like `HashMap::len` + pub fn len(&self) -> usize { + self.map.len() + } + + /// Like `HashMap::values` + pub fn sources<'a>(&'a self) -> Sources<'a, 'src> { + self.map.values() + } + + /// Like `HashMap::iter_mut` + pub fn sources_mut<'a>(&'a mut self) -> SourcesMut<'a, 'src> { + SourcesMut { inner: self.map.iter_mut() } + } +} + +impl<'a, 'src> Iterator for SourcesMut<'a, 'src> { + type Item = (&'a SourceId, &'a mut (Source + 'src)); + fn next(&mut self) -> Option<(&'a SourceId, &'a mut (Source + 'src))> { + self.inner.next().map(|(a, b)| (a, &mut **b)) + } +} + +#[cfg(test)] +mod tests { + use super::{SourceId, Kind, GitReference}; + use util::ToUrl; + + #[test] + fn github_sources_equal() { + let loc = "https://github.com/foo/bar".to_url().unwrap(); + let master = Kind::Git(GitReference::Branch("master".to_string())); + let s1 = SourceId::new(master.clone(), loc).unwrap(); + + let loc = "git://github.com/foo/bar".to_url().unwrap(); + let s2 = SourceId::new(master, loc.clone()).unwrap(); + + assert_eq!(s1, s2); + + let foo = Kind::Git(GitReference::Branch("foo".to_string())); + let s3 = SourceId::new(foo, loc).unwrap(); + assert!(s1 != s3); + } +} diff --git 
a/collector/compile-benchmarks/cargo/src/cargo/core/summary.rs b/collector/compile-benchmarks/cargo/src/cargo/core/summary.rs
new file mode 100644
index 000000000..734f73bd6
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/core/summary.rs
@@ -0,0 +1,124 @@
+use std::collections::BTreeMap;
+use std::mem;
+use std::rc::Rc;
+
+use semver::Version;
+use core::{Dependency, PackageId, SourceId};
+
+use util::CargoResult;
+
+/// Subset of a `Manifest`. Contains only the most important information about
+/// a package.
+///
+/// Summaries are cloned, and should not be mutated after creation
+#[derive(Debug, Clone)]
+pub struct Summary {
+    inner: Rc<Inner>,
+}
+
+#[derive(Debug, Clone)]
+struct Inner {
+    package_id: PackageId,
+    dependencies: Vec<Dependency>,
+    features: BTreeMap<String, Vec<String>>,
+    checksum: Option<String>,
+}
+
+impl Summary {
+    pub fn new(pkg_id: PackageId,
+               dependencies: Vec<Dependency>,
+               features: BTreeMap<String, Vec<String>>) -> CargoResult<Summary> {
+        for dep in dependencies.iter() {
+            if features.get(dep.name()).is_some() {
+                bail!("Features and dependencies cannot have the \
+                       same name: `{}`", dep.name())
+            }
+            if dep.is_optional() && !dep.is_transitive() {
+                bail!("Dev-dependencies are not allowed to be optional: `{}`",
+                      dep.name())
+            }
+        }
+        for (feature, list) in features.iter() {
+            for dep in list.iter() {
+                let mut parts = dep.splitn(2, '/');
+                let dep = parts.next().unwrap();
+                let is_reexport = parts.next().is_some();
+                if !is_reexport && features.get(dep).is_some() { continue }
+                match dependencies.iter().find(|d| d.name() == dep) {
+                    Some(d) => {
+                        if d.is_optional() || is_reexport { continue }
+                        bail!("Feature `{}` depends on `{}` which is not an \
+                               optional dependency.\nConsider adding \
+                               `optional = true` to the dependency",
+                              feature, dep)
+                    }
+                    None if is_reexport => {
+                        bail!("Feature `{}` requires a feature of `{}` which is not a \
+                               dependency", feature, dep)
+                    }
+                    None => {
+                        bail!("Feature `{}` includes `{}` which is neither \
+                               a dependency nor another feature", feature, dep)
+                    }
+                }
+            }
+        }
+        Ok(Summary {
+            inner: Rc::new(Inner {
+                package_id: pkg_id,
+                dependencies: dependencies,
+                features: features,
+                checksum: None,
+            }),
+        })
+    }
+
+    pub fn package_id(&self) -> &PackageId { &self.inner.package_id }
+    pub fn name(&self) -> &str { self.package_id().name() }
+    pub fn version(&self) -> &Version { self.package_id().version() }
+    pub fn source_id(&self) -> &SourceId { self.package_id().source_id() }
+    pub fn dependencies(&self) -> &[Dependency] { &self.inner.dependencies }
+    pub fn features(&self) -> &BTreeMap<String, Vec<String>> { &self.inner.features }
+    pub fn checksum(&self) -> Option<&str> {
+        self.inner.checksum.as_ref().map(|s| &s[..])
+    }
+
+    pub fn override_id(mut self, id: PackageId) -> Summary {
+        Rc::make_mut(&mut self.inner).package_id = id;
+        self
+    }
+
+    pub fn set_checksum(mut self, cksum: String) -> Summary {
+        Rc::make_mut(&mut self.inner).checksum = Some(cksum);
+        self
+    }
+
+    pub fn map_dependencies<F>(mut self, f: F) -> Summary
+        where F: FnMut(Dependency) -> Dependency {
+        {
+            let slot = &mut Rc::make_mut(&mut self.inner).dependencies;
+            let deps = mem::replace(slot, Vec::new());
+            *slot = deps.into_iter().map(f).collect();
+        }
+        self
+    }
+
+    pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId)
+                      -> Summary {
+        let me = if self.package_id().source_id() == to_replace {
+            let new_id = self.package_id().with_source_id(replace_with);
+            self.override_id(new_id)
+        } else {
+            self
+        };
+        me.map_dependencies(|dep| {
+            dep.map_source(to_replace, replace_with)
+        })
+    }
+}
+
+impl PartialEq for Summary
{ + fn eq(&self, other: &Summary) -> bool { + self.inner.package_id == other.inner.package_id + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/core/workspace.rs b/collector/compile-benchmarks/cargo/src/cargo/core/workspace.rs new file mode 100644 index 000000000..58b141269 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/core/workspace.rs @@ -0,0 +1,702 @@ +use std::collections::hash_map::{HashMap, Entry}; +use std::collections::BTreeMap; +use std::path::{Path, PathBuf}; +use std::slice; + +use glob::glob; +use url::Url; + +use core::{Package, VirtualManifest, EitherManifest, SourceId}; +use core::{PackageIdSpec, Dependency, Profile, Profiles}; +use util::{Config, Filesystem}; +use util::errors::{CargoResult, CargoResultExt}; +use util::paths; +use util::toml::read_manifest; + +/// The core abstraction in Cargo for working with a workspace of crates. +/// +/// A workspace is often created very early on and then threaded through all +/// other functions. It's typically through this object that the current +/// package is loaded and/or learned about. +#[derive(Debug)] +pub struct Workspace<'cfg> { + config: &'cfg Config, + + // This path is a path to where the current cargo subcommand was invoked + // from. That is, this is the `--manifest-path` argument to Cargo, and + // points to the "main crate" that we're going to worry about. + current_manifest: PathBuf, + + // A list of packages found in this workspace. Always includes at least the + // package mentioned by `current_manifest`. + packages: Packages<'cfg>, + + // If this workspace includes more than one crate, this points to the root + // of the workspace. This is `None` in the case that `[workspace]` is + // missing, `package.workspace` is missing, and no `Cargo.toml` above + // `current_manifest` was found on the filesystem with `[workspace]`. + root_manifest: Option, + + // Shared target directory for all the packages of this workspace. + // `None` if the default path of `root/target` should be used. + target_dir: Option, + + // List of members in this workspace with a listing of all their manifest + // paths. The packages themselves can be looked up through the `packages` + // set above. + members: Vec, + + // True, if this is a temporary workspace created for the purposes of + // cargo install or cargo package. + is_ephemeral: bool, + + // True if this workspace should enforce optional dependencies even when + // not needed; false if this workspace should only enforce dependencies + // needed by the current configuration (such as in cargo install). + require_optional_deps: bool, +} + +// Separate structure for tracking loaded packages (to avoid loading anything +// twice), and this is separate to help appease the borrow checker. +#[derive(Debug)] +struct Packages<'cfg> { + config: &'cfg Config, + packages: HashMap, +} + +#[derive(Debug)] +enum MaybePackage { + Package(Package), + Virtual(VirtualManifest), +} + +/// Configuration of a workspace in a manifest. +#[derive(Debug, Clone)] +pub enum WorkspaceConfig { + /// Indicates that `[workspace]` was present and the members were + /// optionally specified as well. + Root(WorkspaceRootConfig), + + /// Indicates that `[workspace]` was present and the `root` field is the + /// optional value of `package.workspace`, if present. + Member { root: Option }, +} + +/// Intermediate configuration of a workspace root in a manifest. 
+///
+/// Knows the Workspace Root path, as well as `members` and `exclude` lists of path patterns, which
+/// together tell if some path is recognized as a member by this root or not.
+#[derive(Debug, Clone)]
+pub struct WorkspaceRootConfig {
+    root_dir: PathBuf,
+    members: Option<Vec<String>>,
+    exclude: Vec<String>,
+}
+
+/// An iterator over the member packages of a workspace, returned by
+/// `Workspace::members`
+pub struct Members<'a, 'cfg: 'a> {
+    ws: &'a Workspace<'cfg>,
+    iter: slice::Iter<'a, PathBuf>,
+}
+
+impl<'cfg> Workspace<'cfg> {
+    /// Creates a new workspace given the target manifest pointed to by
+    /// `manifest_path`.
+    ///
+    /// This function will construct the entire workspace by determining the
+    /// root and all member packages. It will then validate the workspace
+    /// before returning it, so `Ok` is only returned for valid workspaces.
+    pub fn new(manifest_path: &Path, config: &'cfg Config)
+               -> CargoResult<Workspace<'cfg>> {
+        let target_dir = config.target_dir()?;
+
+        let mut ws = Workspace {
+            config: config,
+            current_manifest: manifest_path.to_path_buf(),
+            packages: Packages {
+                config: config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            target_dir: target_dir,
+            members: Vec::new(),
+            is_ephemeral: false,
+            require_optional_deps: true,
+        };
+        ws.root_manifest = ws.find_root(manifest_path)?;
+        ws.find_members()?;
+        ws.validate()?;
+        Ok(ws)
+    }
+
+    pub fn current_manifest(&self) -> &Path {
+        &self.current_manifest
+    }
+
+    /// Creates a "temporary workspace" from one package which only contains
+    /// that package.
+    ///
+    /// This constructor will not touch the filesystem and only creates an
+    /// in-memory workspace. That is, all configuration is ignored, it's just
+    /// intended for that one package.
+    ///
+    /// This is currently only used in niche situations like `cargo install` or
+    /// `cargo package`.
+    pub fn ephemeral(package: Package,
+                     config: &'cfg Config,
+                     target_dir: Option<Filesystem>,
+                     require_optional_deps: bool) -> CargoResult<Workspace<'cfg>> {
+        let mut ws = Workspace {
+            config: config,
+            current_manifest: package.manifest_path().to_path_buf(),
+            packages: Packages {
+                config: config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            target_dir: None,
+            members: Vec::new(),
+            is_ephemeral: true,
+            require_optional_deps: require_optional_deps,
+        };
+        {
+            let key = ws.current_manifest.parent().unwrap();
+            let package = MaybePackage::Package(package);
+            ws.packages.packages.insert(key.to_path_buf(), package);
+            ws.target_dir = if let Some(dir) = target_dir {
+                Some(dir)
+            } else {
+                ws.config.target_dir()?
+            };
+            ws.members.push(ws.current_manifest.clone());
+        }
+        Ok(ws)
+    }
+
+    /// Returns the current package of this workspace.
+    ///
+    /// Note that this can return an error if the current manifest is
+    /// actually a "virtual Cargo.toml", in which case an error is returned
+    /// indicating that something else should be passed.
+    pub fn current(&self) -> CargoResult<&Package> {
+        self.current_opt().ok_or_else(||
+            format!("manifest path `{}` is a virtual manifest, but this \
+                     command requires running against an actual package in \
+                     this workspace", self.current_manifest.display()).into()
+        )
+    }
+
+    pub fn current_opt(&self) -> Option<&Package> {
+        match *self.packages.get(&self.current_manifest) {
+            MaybePackage::Package(ref p) => Some(p),
+            MaybePackage::Virtual(..) => None
+        }
+    }
+
+    pub fn is_virtual(&self) -> bool {
+        match *self.packages.get(&self.current_manifest) {
+            MaybePackage::Package(..) => false,
+            MaybePackage::Virtual(..)
=> true + } + } + + /// Returns the `Config` this workspace is associated with. + pub fn config(&self) -> &'cfg Config { + self.config + } + + pub fn profiles(&self) -> &Profiles { + let root = self.root_manifest.as_ref().unwrap_or(&self.current_manifest); + match *self.packages.get(root) { + MaybePackage::Package(ref p) => p.manifest().profiles(), + MaybePackage::Virtual(ref vm) => vm.profiles(), + } + } + + /// Returns the root path of this workspace. + /// + /// That is, this returns the path of the directory containing the + /// `Cargo.toml` which is the root of this workspace. + pub fn root(&self) -> &Path { + match self.root_manifest { + Some(ref p) => p, + None => &self.current_manifest + }.parent().unwrap() + } + + pub fn target_dir(&self) -> Filesystem { + self.target_dir.clone().unwrap_or_else(|| { + Filesystem::new(self.root().join("target")) + }) + } + + /// Returns the root [replace] section of this workspace. + /// + /// This may be from a virtual crate or an actual crate. + pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] { + let path = match self.root_manifest { + Some(ref p) => p, + None => &self.current_manifest, + }; + match *self.packages.get(path) { + MaybePackage::Package(ref p) => p.manifest().replace(), + MaybePackage::Virtual(ref vm) => vm.replace(), + } + } + + /// Returns the root [patch] section of this workspace. + /// + /// This may be from a virtual crate or an actual crate. + pub fn root_patch(&self) -> &HashMap> { + let path = match self.root_manifest { + Some(ref p) => p, + None => &self.current_manifest, + }; + match *self.packages.get(path) { + MaybePackage::Package(ref p) => p.manifest().patch(), + MaybePackage::Virtual(ref vm) => vm.patch(), + } + } + + /// Returns an iterator over all packages in this workspace + pub fn members<'a>(&'a self) -> Members<'a, 'cfg> { + Members { + ws: self, + iter: self.members.iter(), + } + } + + pub fn is_ephemeral(&self) -> bool { + self.is_ephemeral + } + + pub fn require_optional_deps(&self) -> bool { + self.require_optional_deps + } + + /// Finds the root of a workspace for the crate whose manifest is located + /// at `manifest_path`. + /// + /// This will parse the `Cargo.toml` at `manifest_path` and then interpret + /// the workspace configuration, optionally walking up the filesystem + /// looking for other workspace roots. + /// + /// Returns an error if `manifest_path` isn't actually a valid manifest or + /// if some other transient error happens. 
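The root discovery described in the doc comment above is, at its core, an upward walk over `Path::ancestors`. As a rough standalone illustration only (the vendored `find_root` below also honors `package.workspace` pointers and the root's `exclude` list, and parses manifests properly rather than scanning text):

```rust
use std::fs;
use std::path::{Path, PathBuf};

// Minimal sketch: return the closest ancestor `Cargo.toml` that declares
// a `[workspace]` section. The substring check stands in for real TOML
// parsing and is an oversimplification.
fn find_workspace_root(start_dir: &Path) -> Option<PathBuf> {
    for dir in start_dir.ancestors() {
        let manifest = dir.join("Cargo.toml");
        if let Ok(contents) = fs::read_to_string(&manifest) {
            if contents.contains("[workspace]") {
                return Some(manifest);
            }
        }
    }
    None
}
```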
+ fn find_root(&mut self, manifest_path: &Path) + -> CargoResult> { + fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult { + let path = member_manifest.parent().unwrap() + .join(root_link) + .join("Cargo.toml"); + debug!("find_root - pointer {}", path.display()); + Ok(paths::normalize_path(&path)) + }; + + { + let current = self.packages.load(manifest_path)?; + match *current.workspace_config() { + WorkspaceConfig::Root(_) => { + debug!("find_root - is root {}", manifest_path.display()); + return Ok(Some(manifest_path.to_path_buf())) + } + WorkspaceConfig::Member { root: Some(ref path_to_root) } => { + return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)) + } + WorkspaceConfig::Member { root: None } => {} + } + } + + for path in paths::ancestors(manifest_path).skip(2) { + let ances_manifest_path = path.join("Cargo.toml"); + debug!("find_root - trying {}", ances_manifest_path.display()); + if ances_manifest_path.exists() { + match *self.packages.load(&ances_manifest_path)?.workspace_config() { + WorkspaceConfig::Root(ref ances_root_config) => { + debug!("find_root - found a root checking exclusion"); + if !ances_root_config.is_excluded(&manifest_path) { + debug!("find_root - found!"); + return Ok(Some(ances_manifest_path)) + } + } + WorkspaceConfig::Member { root: Some(ref path_to_root) } => { + debug!("find_root - found pointer"); + return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?)) + } + WorkspaceConfig::Member { .. } => {} + } + } + } + + Ok(None) + } + + /// After the root of a workspace has been located, probes for all members + /// of a workspace. + /// + /// If the `workspace.members` configuration is present, then this just + /// verifies that those are all valid packages to point to. Otherwise, this + /// will transitively follow all `path` dependencies looking for members of + /// the workspace. + fn find_members(&mut self) -> CargoResult<()> { + let root_manifest_path = match self.root_manifest { + Some(ref path) => path.clone(), + None => { + debug!("find_members - only me as a member"); + self.members.push(self.current_manifest.clone()); + return Ok(()) + } + }; + + let members_paths = { + let root_package = self.packages.load(&root_manifest_path)?; + match *root_package.workspace_config() { + WorkspaceConfig::Root(ref root_config) => root_config.members_paths()?, + _ => bail!("root of a workspace inferred but wasn't a root: {}", + root_manifest_path.display()), + } + }; + + for path in members_paths { + self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false)?; + } + + self.find_path_deps(&root_manifest_path, &root_manifest_path, false) + } + + fn find_path_deps(&mut self, + manifest_path: &Path, + root_manifest: &Path, + is_path_dep: bool) -> CargoResult<()> { + let manifest_path = paths::normalize_path(manifest_path); + if self.members.iter().any(|p| p == &manifest_path) { + return Ok(()) + } + if is_path_dep + && !manifest_path.parent().unwrap().starts_with(self.root()) + && self.find_root(&manifest_path)? != self.root_manifest { + // If `manifest_path` is a path dependency outside of the workspace, + // don't add it, or any of its dependencies, as a members. 
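For context on the membership test used in the condition above: `Path::starts_with` compares whole path components, so it cannot be fooled by directories that merely share a textual prefix. A self-contained demonstration:

```rust
use std::path::Path;

fn main() {
    let root = Path::new("/ws");
    // `/ws/member/...` is inside the workspace root...
    assert!(Path::new("/ws/member/Cargo.toml").starts_with(root));
    // ...but `/ws-other` is not, despite sharing the `/ws` text prefix.
    assert!(!Path::new("/ws-other/Cargo.toml").starts_with(root));
    println!("component-wise prefix checks behave as expected");
}
```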
+ return Ok(()) + } + + match *self.packages.load(root_manifest)?.workspace_config() { + WorkspaceConfig::Root(ref root_config) => { + if root_config.is_excluded(&manifest_path) { + return Ok(()) + } + } + _ => {} + } + + debug!("find_members - {}", manifest_path.display()); + self.members.push(manifest_path.clone()); + + let candidates = { + let pkg = match *self.packages.load(&manifest_path)? { + MaybePackage::Package(ref p) => p, + MaybePackage::Virtual(_) => return Ok(()), + }; + pkg.dependencies() + .iter() + .map(|d| d.source_id()) + .filter(|d| d.is_path()) + .filter_map(|d| d.url().to_file_path().ok()) + .map(|p| p.join("Cargo.toml")) + .collect::>() + }; + for candidate in candidates { + self.find_path_deps(&candidate, root_manifest, true)?; + } + Ok(()) + } + + /// Validates a workspace, ensuring that a number of invariants are upheld: + /// + /// 1. A workspace only has one root. + /// 2. All workspace members agree on this one root as the root. + /// 3. The current crate is a member of this workspace. + fn validate(&mut self) -> CargoResult<()> { + if self.root_manifest.is_none() { + return Ok(()) + } + + let mut roots = Vec::new(); + { + let mut names = BTreeMap::new(); + for member in self.members.iter() { + let package = self.packages.get(member); + match *package.workspace_config() { + WorkspaceConfig::Root(_) => { + roots.push(member.parent().unwrap().to_path_buf()); + } + WorkspaceConfig::Member { .. } => {} + } + let name = match *package { + MaybePackage::Package(ref p) => p.name(), + MaybePackage::Virtual(_) => continue, + }; + if let Some(prev) = names.insert(name, member) { + bail!("two packages named `{}` in this workspace:\n\ + - {}\n\ + - {}", name, prev.display(), member.display()); + } + } + } + + match roots.len() { + 0 => { + bail!("`package.workspace` configuration points to a crate \ + which is not configured with [workspace]: \n\ + configuration at: {}\n\ + points to: {}", + self.current_manifest.display(), + self.root_manifest.as_ref().unwrap().display()) + } + 1 => {} + _ => { + bail!("multiple workspace roots found in the same workspace:\n{}", + roots.iter() + .map(|r| format!(" {}", r.display())) + .collect::>() + .join("\n")); + } + } + + for member in self.members.clone() { + let root = self.find_root(&member)?; + if root == self.root_manifest { + continue + } + + match root { + Some(root) => { + bail!("package `{}` is a member of the wrong workspace\n\ + expected: {}\n\ + actual: {}", + member.display(), + self.root_manifest.as_ref().unwrap().display(), + root.display()); + } + None => { + bail!("workspace member `{}` is not hierarchically below \ + the workspace root `{}`", + member.display(), + self.root_manifest.as_ref().unwrap().display()); + } + } + } + + if !self.members.contains(&self.current_manifest) { + let root = self.root_manifest.as_ref().unwrap(); + let root_dir = root.parent().unwrap(); + let current_dir = self.current_manifest.parent().unwrap(); + let root_pkg = self.packages.get(root); + + // FIXME: Make this more generic by using a relative path resolver between member and + // root. 
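The fix-it message assembled just below relies on `Path::strip_prefix` to express the member's directory relative to the workspace root, falling back to generic wording when the member lies outside the root. A small standalone demonstration of that split (paths here are hypothetical):

```rust
use std::path::Path;

fn main() {
    let root_dir = Path::new("/ws");
    for member in [Path::new("/ws/crates/foo"), Path::new("/elsewhere/bar")] {
        match member.strip_prefix(root_dir) {
            // Inside the root: suggest the relative path for `workspace.members`.
            Ok(rel) => println!("add `{}` to `workspace.members`", rel.display()),
            // Outside the root: no relative path exists.
            Err(_) => println!("`{}` is not below the workspace root", member.display()),
        }
    }
}
```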
+ let members_msg = match current_dir.strip_prefix(root_dir) { + Ok(rel) => { + format!("this may be fixable by adding `{}` to the \ + `workspace.members` array of the manifest \ + located at: {}", + rel.display(), + root.display()) + } + Err(_) => { + format!("this may be fixable by adding a member to \ + the `workspace.members` array of the \ + manifest located at: {}", root.display()) + } + }; + let extra = match *root_pkg { + MaybePackage::Virtual(_) => members_msg, + MaybePackage::Package(ref p) => { + let has_members_list = match *p.manifest().workspace_config() { + WorkspaceConfig::Root(ref root_config) => root_config.has_members_list(), + WorkspaceConfig::Member { .. } => unreachable!(), + }; + if !has_members_list { + format!("this may be fixable by ensuring that this \ + crate is depended on by the workspace \ + root: {}", root.display()) + } else { + members_msg + } + } + }; + bail!("current package believes it's in a workspace when it's not:\n\ + current: {}\n\ + workspace: {}\n\n{}", + self.current_manifest.display(), + root.display(), + extra); + } + + if let Some(ref root_manifest) = self.root_manifest { + let default_profiles = Profiles { + release: Profile::default_release(), + dev: Profile::default_dev(), + test: Profile::default_test(), + test_deps: Profile::default_dev(), + bench: Profile::default_bench(), + bench_deps: Profile::default_release(), + doc: Profile::default_doc(), + custom_build: Profile::default_custom_build(), + check: Profile::default_check(), + doctest: Profile::default_doctest(), + }; + + for pkg in self.members().filter(|p| p.manifest_path() != root_manifest) { + if pkg.manifest().profiles() != &default_profiles { + let message = &format!("profiles for the non root package will be ignored, \ + specify profiles at the workspace root:\n\ + package: {}\n\ + workspace: {}", + pkg.manifest_path().display(), + root_manifest.display()); + + //TODO: remove `Eq` bound from `Profiles` when the warning is removed. 
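The comparison feeding this warning is why `Profiles` carries an equality bound at all: detecting that a member overrides a profile reduces to comparing against the defaults. A much-reduced sketch of the same pattern, with a hypothetical two-field profile:

```rust
// Hypothetical, much-reduced stand-in for Cargo's `Profile`.
#[derive(Debug, PartialEq)]
struct Profile {
    opt_level: u32,
    debug: bool,
}

impl Default for Profile {
    fn default() -> Profile {
        Profile { opt_level: 0, debug: true }
    }
}

fn main() {
    let member_profile = Profile { opt_level: 3, debug: true };
    if member_profile != Profile::default() {
        println!("warning: profiles for the non root package will be ignored");
    }
}
```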
+ self.config.shell().warn(&message)?; + } + } + } + + Ok(()) + } +} + + +impl<'cfg> Packages<'cfg> { + fn get(&self, manifest_path: &Path) -> &MaybePackage { + &self.packages[manifest_path.parent().unwrap()] + } + + fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> { + let key = manifest_path.parent().unwrap(); + match self.packages.entry(key.to_path_buf()) { + Entry::Occupied(e) => Ok(e.into_mut()), + Entry::Vacant(v) => { + let source_id = SourceId::for_path(key)?; + let (manifest, _nested_paths) = + read_manifest(manifest_path, &source_id, self.config)?; + Ok(v.insert(match manifest { + EitherManifest::Real(manifest) => { + MaybePackage::Package(Package::new(manifest, manifest_path)) + } + EitherManifest::Virtual(vm) => { + MaybePackage::Virtual(vm) + } + })) + } + } + } +} + +impl<'a, 'cfg> Members<'a, 'cfg> { + pub fn is_empty(self) -> bool { + self.count() == 0 + } +} + +impl<'a, 'cfg> Iterator for Members<'a, 'cfg> { + type Item = &'a Package; + + fn next(&mut self) -> Option<&'a Package> { + loop { + let next = self.iter.next().map(|path| { + self.ws.packages.get(path) + }); + match next { + Some(&MaybePackage::Package(ref p)) => return Some(p), + Some(&MaybePackage::Virtual(_)) => {} + None => return None, + } + } + } +} + +impl MaybePackage { + fn workspace_config(&self) -> &WorkspaceConfig { + match *self { + MaybePackage::Package(ref p) => p.manifest().workspace_config(), + MaybePackage::Virtual(ref vm) => vm.workspace_config(), + } + } +} + +impl WorkspaceRootConfig { + /// Create a new Intermediate Workspace Root configuration. + pub fn new( + root_dir: &Path, + members: &Option>, + exclude: &Option>, + ) -> WorkspaceRootConfig { + WorkspaceRootConfig { + root_dir: root_dir.to_path_buf(), + members: members.clone(), + exclude: exclude.clone().unwrap_or_default(), + } + } + + /// Checks the path against the `excluded` list. + /// + /// This method does NOT consider the `members` list. + fn is_excluded(&self, manifest_path: &Path) -> bool { + let excluded = self.exclude.iter().any(|ex| { + manifest_path.starts_with(self.root_dir.join(ex)) + }); + + let explicit_member = match self.members { + Some(ref members) => { + members.iter().any(|mem| { + manifest_path.starts_with(self.root_dir.join(mem)) + }) + } + None => false, + }; + + !explicit_member && excluded + } + + fn has_members_list(&self) -> bool { + self.members.is_some() + } + + fn members_paths(&self) -> CargoResult> { + let mut expanded_list = Vec::new(); + + if let Some(globs) = self.members.clone() { + for glob in globs { + let pathbuf = self.root_dir.join(glob); + let expanded_paths = Self::expand_member_path(&pathbuf)?; + + // If glob does not find any valid paths, then put the original + // path in the expanded list to maintain backwards compatibility. 
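The backwards-compatibility fallback described in that comment can be sketched against the `glob` crate's public API (`glob::glob` returns an iterator of matched paths); the helper name here is illustrative, not the vendored code:

```rust
use std::path::PathBuf;

use glob::glob;

// Expand a `workspace.members` pattern; if nothing matches, keep the
// literal path so plain directory entries continue to work.
fn expand_member(pattern: &str) -> Vec<PathBuf> {
    let matches: Vec<PathBuf> = glob(pattern)
        .map(|paths| paths.filter_map(Result::ok).collect())
        .unwrap_or_default();
    if matches.is_empty() {
        vec![PathBuf::from(pattern)]
    } else {
        matches
    }
}

fn main() {
    // With no `crates/` directory present, this falls back to the literal path.
    println!("{:?}", expand_member("crates/*"));
}
```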
+ if expanded_paths.is_empty() { + expanded_list.push(pathbuf); + } else { + expanded_list.extend(expanded_paths); + } + } + } + + Ok(expanded_list) + } + + fn expand_member_path(path: &Path) -> CargoResult> { + let path = match path.to_str() { + Some(p) => p, + None => return Ok(Vec::new()), + }; + let res = glob(path).chain_err(|| { + format!("could not parse pattern `{}`", &path) + })?; + res.map(|p| { + p.chain_err(|| { + format!("unable to match path to pattern `{}`", &path) + }) + }).collect() + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/lib.rs b/collector/compile-benchmarks/cargo/src/cargo/lib.rs new file mode 100755 index 000000000..f20118b80 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/lib.rs @@ -0,0 +1,255 @@ +#![deny(unused)] +#![cfg_attr(test, deny(warnings))] +#![recursion_limit="128"] + +#[macro_use] extern crate error_chain; +#[macro_use] extern crate log; +#[macro_use] extern crate scoped_tls; +#[macro_use] extern crate serde_derive; +#[macro_use] extern crate serde_json; +extern crate atty; +extern crate crates_io as registry; +extern crate crossbeam; +extern crate curl; +extern crate docopt; +extern crate filetime; +extern crate flate2; +extern crate fs2; +extern crate git2; +extern crate glob; +extern crate hex; +extern crate home; +extern crate ignore; +extern crate jobserver; +extern crate libc; +extern crate libgit2_sys; +extern crate num_cpus; +extern crate same_file; +extern crate semver; +extern crate serde; +extern crate serde_ignored; +extern crate shell_escape; +extern crate tar; +extern crate tempdir; +extern crate termcolor; +extern crate toml; +extern crate url; +#[cfg(target_os = "macos")] +extern crate core_foundation; + +use std::fmt; +use std::error::Error; + +use error_chain::ChainedError; +use serde::Deserialize; +use serde::ser; +use docopt::Docopt; + +use core::Shell; +use core::shell::Verbosity::Verbose; + +pub use util::{CargoError, CargoErrorKind, CargoResult, CliError, CliResult, Config}; + +pub const CARGO_ENV: &'static str = "CARGO"; + +macro_rules! bail { + ($($fmt:tt)*) => ( + return Err(::util::errors::CargoError::from(format_args!($($fmt)*).to_string())) + ) +} + +pub mod core; +pub mod ops; +pub mod sources; +pub mod util; + +pub struct CommitInfo { + pub short_commit_hash: String, + pub commit_hash: String, + pub commit_date: String, +} + +pub struct CfgInfo { + // Information about the git repository we may have been built from. + pub commit_info: Option, + // The release channel we were built for. + pub release_channel: String, +} + +pub struct VersionInfo { + pub major: String, + pub minor: String, + pub patch: String, + pub pre_release: Option, + // Information that's only available when we were built with + // configure/make, rather than cargo itself. 
+ pub cfg_info: Option, +} + +impl fmt::Display for VersionInfo { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "cargo {}.{}.{}", + self.major, self.minor, self.patch)?; + if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) { + if channel != "stable" { + write!(f, "-{}", channel)?; + let empty = String::from(""); + write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?; + } + }; + + if let Some(ref cfg) = self.cfg_info { + if let Some(ref ci) = cfg.commit_info { + write!(f, " ({} {})", + ci.short_commit_hash, ci.commit_date)?; + } + }; + Ok(()) + } +} + +pub fn call_main_without_stdin<'de, Flags: Deserialize<'de>>( + exec: fn(Flags, &mut Config) -> CliResult, + config: &mut Config, + usage: &str, + args: &[String], + options_first: bool) -> CliResult +{ + let docopt = Docopt::new(usage).unwrap() + .options_first(options_first) + .argv(args.iter().map(|s| &s[..])) + .help(true); + + let flags = docopt.deserialize().map_err(|e| { + let code = if e.fatal() {1} else {0}; + CliError::new(e.to_string().into(), code) + })?; + + exec(flags, config) +} + +pub fn print_json(obj: &T) { + let encoded = serde_json::to_string(&obj).unwrap(); + println!("{}", encoded); +} + +pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! { + debug!("exit_with_error; err={:?}", err); + + let CliError { error, exit_code, unknown } = err; + // exit_code == 0 is non-fatal error, e.g. docopt version info + let fatal = exit_code != 0; + + let hide = unknown && shell.verbosity() != Verbose; + + if let Some(error) = error { + if hide { + drop(shell.error("An unknown error occurred")) + } else if fatal { + drop(shell.error(&error)) + } else { + drop(writeln!(shell.err(), "{}", error)) + } + + if !handle_cause(error, shell) || hide { + drop(writeln!(shell.err(), "\nTo learn more, run the command again \ + with --verbose.")); + } + } + + std::process::exit(exit_code) +} + +pub fn handle_error(err: CargoError, shell: &mut Shell) { + debug!("handle_error; err={:?}", &err); + + let _ignored_result = shell.error(&err); + handle_cause(err, shell); +} + +fn handle_cause(cargo_err: E, shell: &mut Shell) -> bool + where E: ChainedError + 'static +{ + fn print(error: String, shell: &mut Shell) { + drop(writeln!(shell.err(), "\nCaused by:")); + drop(writeln!(shell.err(), " {}", error)); + } + + //Error inspection in non-verbose mode requires inspecting the + //error kind to avoid printing Internal errors. The downcasting + //machinery requires &(Error + 'static), but the iterator (and + //underlying `cause`) return &Error. 
Because the borrows are + //constrained to this handling method, and because the original + //error object is constrained to be 'static, we're casting away + //the borrow's actual lifetime for purposes of downcasting and + //inspecting the error chain + unsafe fn extend_lifetime(r: &Error) -> &(Error + 'static) { + std::mem::transmute::<&Error, &Error>(r) + } + + let verbose = shell.verbosity(); + + if verbose == Verbose { + //The first error has already been printed to the shell + //Print all remaining errors + for err in cargo_err.iter().skip(1) { + print(err.to_string(), shell); + } + } else { + //The first error has already been printed to the shell + //Print remaining errors until one marked as Internal appears + for err in cargo_err.iter().skip(1) { + let err = unsafe { extend_lifetime(err) }; + if let Some(&CargoError(CargoErrorKind::Internal(..), ..)) = + err.downcast_ref::() { + return false; + } + + print(err.to_string(), shell); + } + } + + true +} + +pub fn version() -> VersionInfo { + macro_rules! env_str { + ($name:expr) => { env!($name).to_string() } + } + macro_rules! option_env_str { + ($name:expr) => { option_env!($name).map(|s| s.to_string()) } + } + match option_env!("CFG_RELEASE_CHANNEL") { + // We have environment variables set up from configure/make. + Some(_) => { + let commit_info = + option_env!("CFG_COMMIT_HASH").map(|s| { + CommitInfo { + commit_hash: s.to_string(), + short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(), + commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(), + } + }); + VersionInfo { + major: env_str!("CARGO_PKG_VERSION_MAJOR"), + minor: env_str!("CARGO_PKG_VERSION_MINOR"), + patch: env_str!("CARGO_PKG_VERSION_PATCH"), + pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"), + cfg_info: Some(CfgInfo { + release_channel: option_env_str!("CFG_RELEASE_CHANNEL").unwrap(), + commit_info: commit_info, + }), + } + }, + // We are being compiled by Cargo itself. + None => { + VersionInfo { + major: env_str!("CARGO_PKG_VERSION_MAJOR"), + minor: env_str!("CARGO_PKG_VERSION_MINOR"), + patch: env_str!("CARGO_PKG_VERSION_PATCH"), + pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"), + cfg_info: None, + } + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_clean.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_clean.rs new file mode 100644 index 000000000..9b266b0bb --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_clean.rs @@ -0,0 +1,109 @@ +use std::default::Default; +use std::fs; +use std::path::Path; + +use core::{Profiles, Workspace}; +use util::Config; +use util::errors::{CargoResult, CargoResultExt}; +use ops::{self, Context, BuildConfig, Kind, Unit}; + +pub struct CleanOptions<'a> { + pub spec: &'a [String], + pub target: Option<&'a str>, + pub config: &'a Config, + pub release: bool, +} + +/// Cleans the project from build artifacts. +pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> { + let target_dir = ws.target_dir(); + + // If we have a spec, then we need to delete some packages, otherwise, just + // remove the whole target directory and be done with it! + // + // Note that we don't bother grabbing a lock here as we're just going to + // blow it all away anyway. 
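The whole-directory case handled next ultimately needs a delete that works for both files and directories; a std-only sketch of such a helper (the vendored file defines its own `rm_rf` further down, wired into Cargo's error chaining):

```rust
use std::fs;
use std::io;
use std::path::Path;

fn rm_rf(path: &Path) -> io::Result<()> {
    match fs::metadata(path) {
        Ok(m) if m.is_dir() => fs::remove_dir_all(path),
        Ok(_) => fs::remove_file(path),
        // Already gone: treat as success rather than an error.
        Err(ref e) if e.kind() == io::ErrorKind::NotFound => Ok(()),
        Err(e) => Err(e),
    }
}
```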
+ if opts.spec.is_empty() { + let target_dir = target_dir.into_path_unlocked(); + return rm_rf(&target_dir); + } + + let (packages, resolve) = ops::resolve_ws(ws)?; + + let profiles = ws.profiles(); + let host_triple = opts.config.rustc()?.host.clone(); + let mut cx = Context::new(ws, &resolve, &packages, opts.config, + BuildConfig { + host_triple, + requested_target: opts.target.map(|s| s.to_owned()), + release: opts.release, + jobs: 1, + ..BuildConfig::default() + }, + profiles)?; + let mut units = Vec::new(); + + for spec in opts.spec { + // Translate the spec to a Package + let pkgid = resolve.query(spec)?; + let pkg = packages.get(pkgid)?; + + // Generate all relevant `Unit` targets for this package + for target in pkg.targets() { + for kind in [Kind::Host, Kind::Target].iter() { + let Profiles { + ref release, ref dev, ref test, ref bench, ref doc, + ref custom_build, ref test_deps, ref bench_deps, ref check, + ref doctest, + } = *profiles; + let profiles = [release, dev, test, bench, doc, custom_build, + test_deps, bench_deps, check, doctest]; + for profile in profiles.iter() { + units.push(Unit { + pkg, + target, + profile, + kind: *kind, + }); + } + } + } + } + + cx.probe_target_info(&units)?; + + for unit in units.iter() { + rm_rf(&cx.fingerprint_dir(unit))?; + if unit.target.is_custom_build() { + if unit.profile.run_custom_build { + rm_rf(&cx.build_script_out_dir(unit))?; + } else { + rm_rf(&cx.build_script_dir(unit))?; + } + continue + } + + for &(ref src, ref link_dst, _) in cx.target_filenames(unit)?.iter() { + rm_rf(src)?; + if let Some(ref dst) = *link_dst { + rm_rf(dst)?; + } + } + } + + Ok(()) +} + +fn rm_rf(path: &Path) -> CargoResult<()> { + let m = fs::metadata(path); + if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { + fs::remove_dir_all(path).chain_err(|| { + "could not remove build directory" + })?; + } else if m.is_ok() { + fs::remove_file(path).chain_err(|| { + "failed to remove build artifact" + })?; + } + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_compile.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_compile.rs new file mode 100644 index 000000000..685911203 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_compile.rs @@ -0,0 +1,784 @@ +//! +//! Cargo compile currently does the following steps: +//! +//! All configurations are already injected as environment variables via the +//! main cargo command +//! +//! 1. Read the manifest +//! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as +//! stdin +//! +//! a. Shell out to `--do update` and `--do list` for each source +//! b. Resolve dependencies and return a list of name/version/source +//! +//! 3. Shell out to `--do download` for each source +//! 4. Shell out to `--do get` for each source, and build up the list of paths +//! to pass to rustc -L +//! 5. Call `cargo-rustc` with the results of the resolver zipped together with +//! the results of the `get` +//! +//! a. Topologically sort the dependencies +//! b. Compile each dependency in order, passing in the -L's pointing at each +//! previously compiled dependency +//! 
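Step 5a of the outline above, topologically sorting the dependency graph, is what guarantees every `-L` path exists before a crate is compiled. A self-contained sketch with a hypothetical string-keyed graph (the real resolver works over `PackageId`s):

```rust
use std::collections::HashMap;

// Post-order DFS: a crate is emitted only after all of its
// dependencies, which is exactly the order compilation needs.
fn topo_sort<'a>(deps: &HashMap<&'a str, Vec<&'a str>>, root: &'a str) -> Vec<&'a str> {
    fn visit<'a>(node: &'a str,
                 deps: &HashMap<&'a str, Vec<&'a str>>,
                 seen: &mut Vec<&'a str>,
                 out: &mut Vec<&'a str>) {
        if seen.contains(&node) {
            return;
        }
        seen.push(node);
        for &dep in deps.get(node).into_iter().flatten() {
            visit(dep, deps, seen, out);
        }
        out.push(node);
    }
    let (mut seen, mut out) = (Vec::new(), Vec::new());
    visit(root, deps, &mut seen, &mut out);
    out
}

fn main() {
    let mut deps = HashMap::new();
    deps.insert("app", vec!["log", "serde"]);
    deps.insert("serde", vec!["log"]);
    assert_eq!(topo_sort(&deps, "app"), ["log", "serde", "app"]);
}
```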
+ +use std::collections::{HashMap, HashSet}; +use std::default::Default; +use std::path::PathBuf; +use std::sync::Arc; + +use core::{Source, Package, Target}; +use core::{Profile, TargetKind, Profiles, Workspace, PackageId, PackageIdSpec}; +use core::resolver::Resolve; +use ops::{self, BuildOutput, Executor, DefaultExecutor}; +use util::config::Config; +use util::{CargoResult, profile}; +use util::errors::{CargoResultExt, CargoError}; + +/// Contains information about how a package should be compiled. +#[derive(Debug)] +pub struct CompileOptions<'a> { + pub config: &'a Config, + /// Number of concurrent jobs to use. + pub jobs: Option, + /// The target platform to compile for (example: `i686-unknown-linux-gnu`). + pub target: Option<&'a str>, + /// Extra features to build for the root package + pub features: &'a [String], + /// Flag whether all available features should be built for the root package + pub all_features: bool, + /// Flag if the default feature should be built for the root package + pub no_default_features: bool, + /// A set of packages to build. + pub spec: Packages<'a>, + /// Filter to apply to the root package to select which targets will be + /// built. + pub filter: CompileFilter<'a>, + /// Whether this is a release build or not + pub release: bool, + /// Mode for this compile. + pub mode: CompileMode, + /// `--error_format` flag for the compiler. + pub message_format: MessageFormat, + /// Extra arguments to be passed to rustdoc (for main crate and dependencies) + pub target_rustdoc_args: Option<&'a [String]>, + /// The specified target will be compiled with all the available arguments, + /// note that this only accounts for the *final* invocation of rustc + pub target_rustc_args: Option<&'a [String]>, +} + +impl<'a> CompileOptions<'a> { + pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a> + { + CompileOptions { + config: config, + jobs: None, + target: None, + features: &[], + all_features: false, + no_default_features: false, + spec: ops::Packages::Packages(&[]), + mode: mode, + release: false, + filter: CompileFilter::Default { required_features_filterable: false }, + message_format: MessageFormat::Human, + target_rustdoc_args: None, + target_rustc_args: None, + } + } +} + +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum CompileMode { + Test, + Build, + Check, + Bench, + Doc { deps: bool }, + Doctest, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize)] +pub enum MessageFormat { + Human, + Json +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +pub enum Packages<'a> { + All, + OptOut(&'a [String]), + Packages(&'a [String]), +} + +impl<'a> Packages<'a> { + pub fn from_flags(virtual_ws: bool, all: bool, exclude: &'a [String], package: &'a [String]) + -> CargoResult + { + let all = all || (virtual_ws && package.is_empty()); + + let packages = match (all, &exclude) { + (true, exclude) if exclude.is_empty() => Packages::All, + (true, exclude) => Packages::OptOut(exclude), + (false, exclude) if !exclude.is_empty() => bail!("--exclude can only be used together \ + with --all"), + _ => Packages::Packages(package), + }; + + Ok(packages) + } + + pub fn into_package_id_specs(self, ws: &Workspace) -> CargoResult> { + let specs = match self { + Packages::All => { + ws.members() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .collect() + } + Packages::OptOut(opt_out) => { + ws.members() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none()) 
+ .collect() + } + Packages::Packages(packages) if packages.is_empty() => { + ws.current_opt() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .into_iter().collect() + } + Packages::Packages(packages) => { + packages.iter().map(|p| PackageIdSpec::parse(p)).collect::>>()? + } + }; + Ok(specs) + } +} + +#[derive(Clone, Copy, Debug)] +pub enum FilterRule<'a> { + All, + Just (&'a [String]), +} + +#[derive(Debug)] +pub enum CompileFilter<'a> { + Default { + /// Flag whether targets can be safely skipped when required-features are not satisfied. + required_features_filterable: bool, + }, + Only { + lib: bool, + bins: FilterRule<'a>, + examples: FilterRule<'a>, + tests: FilterRule<'a>, + benches: FilterRule<'a>, + } +} + +pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions<'a>) + -> CargoResult> { + compile_with_exec(ws, options, Arc::new(DefaultExecutor)) +} + +pub fn compile_with_exec<'a>(ws: &Workspace<'a>, + options: &CompileOptions<'a>, + exec: Arc) + -> CargoResult> { + for member in ws.members() { + for warning in member.manifest().warnings().iter() { + if warning.is_critical { + let err: CargoResult<_> = Err(CargoError::from(warning.message.to_owned())); + return err.chain_err(|| { + format!("failed to parse manifest at `{}`", member.manifest_path().display()) + }) + } else { + options.config.shell().warn(&warning.message)? + } + } + } + compile_ws(ws, None, options, exec) +} + +pub fn compile_ws<'a>(ws: &Workspace<'a>, + source: Option>, + options: &CompileOptions<'a>, + exec: Arc) + -> CargoResult> { + let CompileOptions { config, jobs, target, spec, features, + all_features, no_default_features, + release, mode, message_format, + ref filter, + ref target_rustdoc_args, + ref target_rustc_args } = *options; + + let target = target.map(|s| s.to_string()); + + if jobs == Some(0) { + bail!("jobs must be at least 1") + } + + let profiles = ws.profiles(); + + let specs = spec.into_package_id_specs(ws)?; + let resolve = ops::resolve_ws_precisely(ws, + source, + features, + all_features, + no_default_features, + &specs)?; + let (packages, resolve_with_overrides) = resolve; + + if specs.is_empty() { + return Err(format!("manifest path `{}` contains no package: The manifest is virtual, \ + and the workspace has no members.", ws.current_manifest().display()).into()); + }; + + let to_builds = specs.iter().map(|p| { + let pkgid = p.query(resolve_with_overrides.iter())?; + let p = packages.get(pkgid)?; + p.manifest().print_teapot(ws.config()); + Ok(p) + }).collect::>>()?; + + let mut general_targets = Vec::new(); + let mut package_targets = Vec::new(); + + match (*target_rustc_args, *target_rustdoc_args) { + (Some(..), _) | + (_, Some(..)) if to_builds.len() != 1 => { + panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags") + } + (Some(args), _) => { + let all_features = resolve_all_features(&resolve_with_overrides, + to_builds[0].package_id()); + let targets = generate_targets(to_builds[0], profiles, + mode, filter, &all_features, release)?; + if targets.len() == 1 { + let (target, profile) = targets[0]; + let mut profile = profile.clone(); + profile.rustc_args = Some(args.to_vec()); + general_targets.push((target, profile)); + } else { + bail!("extra arguments to `rustc` can only be passed to one \ + target, consider filtering\nthe package by passing \ + e.g. 
`--lib` or `--bin NAME` to specify a single target") + } + } + (None, Some(args)) => { + let all_features = resolve_all_features(&resolve_with_overrides, + to_builds[0].package_id()); + let targets = generate_targets(to_builds[0], profiles, + mode, filter, &all_features, release)?; + if targets.len() == 1 { + let (target, profile) = targets[0]; + let mut profile = profile.clone(); + profile.rustdoc_args = Some(args.to_vec()); + general_targets.push((target, profile)); + } else { + bail!("extra arguments to `rustdoc` can only be passed to one \ + target, consider filtering\nthe package by passing e.g. \ + `--lib` or `--bin NAME` to specify a single target") + } + } + (None, None) => { + for &to_build in to_builds.iter() { + let all_features = resolve_all_features(&resolve_with_overrides, + to_build.package_id()); + let targets = generate_targets(to_build, profiles, mode, + filter, &all_features, release)?; + package_targets.push((to_build, targets)); + } + } + }; + + for &(target, ref profile) in &general_targets { + for &to_build in to_builds.iter() { + package_targets.push((to_build, vec![(target, profile)])); + } + } + + let mut ret = { + let _p = profile::start("compiling"); + let mut build_config = scrape_build_config(config, jobs, target)?; + build_config.release = release; + build_config.test = mode == CompileMode::Test || mode == CompileMode::Bench; + build_config.json_messages = message_format == MessageFormat::Json; + if let CompileMode::Doc { deps } = mode { + build_config.doc_all = deps; + } + + ops::compile_targets(ws, + &package_targets, + &packages, + &resolve_with_overrides, + config, + build_config, + profiles, + exec)? + }; + + ret.to_doc_test = to_builds.into_iter().cloned().collect(); + + return Ok(ret); + + fn resolve_all_features(resolve_with_overrides: &Resolve, + package_id: &PackageId) + -> HashSet { + let mut features = resolve_with_overrides.features(package_id).clone(); + + // Include features enabled for use by dependencies so targets can also use them with the + // required-features field when deciding whether to be built or skipped. 
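The namespacing mentioned in that comment produces entries of the form `"serde/derive"`, which is what `required-features` entries match against. A contained sketch with hypothetical feature data:

```rust
use std::collections::{HashMap, HashSet};

fn main() {
    // Features enabled on the package itself.
    let mut features: HashSet<String> = HashSet::new();
    features.insert("default".to_string());

    // Hypothetical features resolved for each dependency.
    let mut dep_features: HashMap<&str, Vec<&str>> = HashMap::new();
    dep_features.insert("serde", vec!["derive"]);

    // Namespace each dependency feature as "dep/feature".
    for (dep, feats) in &dep_features {
        for feat in feats {
            features.insert(format!("{}/{}", dep, feat));
        }
    }

    assert!(features.contains("serde/derive"));
}
```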
+ let deps = resolve_with_overrides.deps(package_id); + for dep in deps { + for feature in resolve_with_overrides.features(dep) { + features.insert(dep.name().to_string() + "/" + feature); + } + } + + features + } +} + +impl<'a> FilterRule<'a> { + pub fn new(targets: &'a [String], all: bool) -> FilterRule<'a> { + if all { + FilterRule::All + } else { + FilterRule::Just(targets) + } + } + + fn matches(&self, target: &Target) -> bool { + match *self { + FilterRule::All => true, + FilterRule::Just(targets) => { + targets.iter().any(|x| *x == target.name()) + }, + } + } + + fn is_specific(&self) -> bool { + match *self { + FilterRule::All => true, + FilterRule::Just(targets) => !targets.is_empty(), + } + } + + pub fn try_collect(&self) -> Option> { + match *self { + FilterRule::All => None, + FilterRule::Just(targets) => Some(targets.to_vec()), + } + } +} + +impl<'a> CompileFilter<'a> { + pub fn new(lib_only: bool, + bins: &'a [String], all_bins: bool, + tsts: &'a [String], all_tsts: bool, + exms: &'a [String], all_exms: bool, + bens: &'a [String], all_bens: bool, + all_targets: bool) -> CompileFilter<'a> { + let rule_bins = FilterRule::new(bins, all_bins); + let rule_tsts = FilterRule::new(tsts, all_tsts); + let rule_exms = FilterRule::new(exms, all_exms); + let rule_bens = FilterRule::new(bens, all_bens); + + if all_targets { + CompileFilter::Only { + lib: true, bins: FilterRule::All, + examples: FilterRule::All, benches: FilterRule::All, + tests: FilterRule::All, + } + } else if lib_only || rule_bins.is_specific() || rule_tsts.is_specific() + || rule_exms.is_specific() || rule_bens.is_specific() { + CompileFilter::Only { + lib: lib_only, bins: rule_bins, + examples: rule_exms, benches: rule_bens, + tests: rule_tsts, + } + } else { + CompileFilter::Default { + required_features_filterable: true, + } + } + } + + pub fn matches(&self, target: &Target) -> bool { + match *self { + CompileFilter::Default { .. } => true, + CompileFilter::Only { lib, bins, examples, tests, benches } => { + let rule = match *target.kind() { + TargetKind::Bin => bins, + TargetKind::Test => tests, + TargetKind::Bench => benches, + TargetKind::ExampleBin | + TargetKind::ExampleLib(..) => examples, + TargetKind::Lib(..) => return lib, + TargetKind::CustomBuild => return false, + }; + rule.matches(target) + } + } + } + + pub fn is_specific(&self) -> bool { + match *self { + CompileFilter::Default { .. } => false, + CompileFilter::Only { .. 
} => true, + } + } +} + +#[derive(Clone, Copy, Debug)] +struct BuildProposal<'a> { + target: &'a Target, + profile: &'a Profile, + required: bool, +} + +fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target], + profile: &'a Profile, + dep: &'a Profile, + required_features_filterable: bool) -> Vec> { + match mode { + CompileMode::Bench => { + targets.iter().filter(|t| t.benched()).map(|t| { + BuildProposal { + target: t, + profile: profile, + required: !required_features_filterable, + } + }).collect::>() + } + CompileMode::Test => { + let mut base = targets.iter().filter(|t| { + t.tested() + }).map(|t| { + BuildProposal { + target: t, + profile: if t.is_example() {dep} else {profile}, + required: !required_features_filterable, + } + }).collect::>(); + + // Always compile the library if we're testing everything as + // it'll be needed for doctests + if let Some(t) = targets.iter().find(|t| t.is_lib()) { + if t.doctested() { + base.push(BuildProposal { + target: t, + profile: dep, + required: !required_features_filterable, + }); + } + } + base + } + CompileMode::Build | CompileMode::Check => { + targets.iter().filter(|t| { + t.is_bin() || t.is_lib() + }).map(|t| BuildProposal { + target: t, + profile: profile, + required: !required_features_filterable, + }).collect() + } + CompileMode::Doc { .. } => { + targets.iter().filter(|t| { + t.documented() + }).map(|t| BuildProposal { + target: t, + profile: profile, + required: !required_features_filterable, + }).collect() + } + CompileMode::Doctest => { + if let Some(t) = targets.iter().find(|t| t.is_lib()) { + if t.doctested() { + return vec![BuildProposal { + target: t, + profile: profile, + required: !required_features_filterable, + }]; + } + } + + Vec::new() + } + } +} + +/// Given a filter rule and some context, propose a list of targets +fn propose_indicated_targets<'a>(pkg: &'a Package, + rule: FilterRule, + desc: &'static str, + is_expected_kind: fn(&Target) -> bool, + profile: &'a Profile) -> CargoResult>> { + match rule { + FilterRule::All => { + let result = pkg.targets().iter().filter(|t| is_expected_kind(t)).map(|t| { + BuildProposal { + target: t, + profile: profile, + required: false, + } + }); + Ok(result.collect()) + } + FilterRule::Just(names) => { + let mut targets = Vec::new(); + for name in names { + let target = pkg.targets().iter().find(|t| { + t.name() == *name && is_expected_kind(t) + }); + let t = match target { + Some(t) => t, + None => { + let suggestion = pkg.find_closest_target(name, is_expected_kind); + match suggestion { + Some(s) => { + let suggested_name = s.name(); + bail!("no {} target named `{}`\n\nDid you mean `{}`?", + desc, name, suggested_name) + } + None => bail!("no {} target named `{}`", desc, name), + } + } + }; + debug!("found {} `{}`", desc, name); + targets.push(BuildProposal { + target: t, + profile: profile, + required: true, + }); + } + Ok(targets) + } + } +} + +/// Collect the targets that are libraries or have all required features available. +fn filter_compatible_targets<'a>(mut proposals: Vec>, + features: &HashSet) + -> CargoResult> { + let mut compatible = Vec::with_capacity(proposals.len()); + for proposal in proposals.drain(..) 
{ + let unavailable_features = match proposal.target.required_features() { + Some(rf) => rf.iter().filter(|f| !features.contains(*f)).collect(), + None => Vec::new(), + }; + if proposal.target.is_lib() || unavailable_features.is_empty() { + compatible.push((proposal.target, proposal.profile)); + } else if proposal.required { + let required_features = proposal.target.required_features().unwrap(); + let quoted_required_features: Vec = required_features.iter() + .map(|s| format!("`{}`",s)) + .collect(); + bail!("target `{}` requires the features: {}\n\ + Consider enabling them by passing e.g. `--features=\"{}\"`", + proposal.target.name(), + quoted_required_features.join(", "), + required_features.join(" ")); + } + } + Ok(compatible) +} + +/// Given the configuration for a build, this function will generate all +/// target/profile combinations needed to be built. +fn generate_targets<'a>(pkg: &'a Package, + profiles: &'a Profiles, + mode: CompileMode, + filter: &CompileFilter, + features: &HashSet, + release: bool) + -> CargoResult> { + let build = if release {&profiles.release} else {&profiles.dev}; + let test = if release {&profiles.bench} else {&profiles.test}; + let profile = match mode { + CompileMode::Test => test, + CompileMode::Bench => &profiles.bench, + CompileMode::Build => build, + CompileMode::Check => &profiles.check, + CompileMode::Doc { .. } => &profiles.doc, + CompileMode::Doctest => &profiles.doctest, + }; + + let targets = match *filter { + CompileFilter::Default { required_features_filterable } => { + let deps = if release { + &profiles.bench_deps + } else { + &profiles.test_deps + }; + generate_auto_targets(mode, pkg.targets(), profile, deps, required_features_filterable) + } + CompileFilter::Only { lib, bins, examples, tests, benches } => { + let mut targets = Vec::new(); + + if lib { + if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) { + targets.push(BuildProposal { + target: t, + profile: profile, + required: true, + }); + } else { + bail!("no library targets found") + } + } + + targets.append(&mut propose_indicated_targets( + pkg, bins, "bin", Target::is_bin, profile)?); + targets.append(&mut propose_indicated_targets( + pkg, examples, "example", Target::is_example, build)?); + targets.append(&mut propose_indicated_targets( + pkg, tests, "test", Target::is_test, test)?); + targets.append(&mut propose_indicated_targets( + pkg, benches, "bench", Target::is_bench, &profiles.bench)?); + targets + } + }; + + filter_compatible_targets(targets, features) +} + +/// Parse all config files to learn about build configuration. Currently +/// configured options are: +/// +/// * build.jobs +/// * build.target +/// * target.$target.ar +/// * target.$target.linker +/// * target.$target.libfoo.metadata +fn scrape_build_config(config: &Config, + jobs: Option, + target: Option) + -> CargoResult { + if jobs.is_some() && config.jobserver_from_env().is_some() { + config.shell().warn("a `-j` argument was passed to Cargo but Cargo is \ + also configured with an external jobserver in \ + its environment, ignoring the `-j` parameter")?; + } + let cfg_jobs = match config.get_i64("build.jobs")? 
{ + Some(v) => { + if v.val <= 0 { + bail!("build.jobs must be positive, but found {} in {}", + v.val, v.definition) + } else if v.val >= i64::from(u32::max_value()) { + bail!("build.jobs is too large: found {} in {}", v.val, + v.definition) + } else { + Some(v.val as u32) + } + } + None => None, + }; + let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32); + let cfg_target = config.get_string("build.target")?.map(|s| s.val); + let target = target.or(cfg_target); + let mut base = ops::BuildConfig { + host_triple: config.rustc()?.host.clone(), + requested_target: target.clone(), + jobs: jobs, + ..Default::default() + }; + base.host = scrape_target_config(config, &base.host_triple)?; + base.target = match target.as_ref() { + Some(triple) => scrape_target_config(config, triple)?, + None => base.host.clone(), + }; + Ok(base) +} + +fn scrape_target_config(config: &Config, triple: &str) + -> CargoResult { + + let key = format!("target.{}", triple); + let mut ret = ops::TargetConfig { + ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val), + linker: config.get_path(&format!("{}.linker", key))?.map(|v| v.val), + overrides: HashMap::new(), + }; + let table = match config.get_table(&key)? { + Some(table) => table.val, + None => return Ok(ret), + }; + for (lib_name, value) in table { + match lib_name.as_str() { + "ar" | "linker" | "runner" | "rustflags" => { + continue + }, + _ => {} + } + + let mut output = BuildOutput { + library_paths: Vec::new(), + library_links: Vec::new(), + cfgs: Vec::new(), + env: Vec::new(), + metadata: Vec::new(), + rerun_if_changed: Vec::new(), + rerun_if_env_changed: Vec::new(), + warnings: Vec::new(), + }; + // We require deterministic order of evaluation, so we must sort the pairs by key first. + let mut pairs = Vec::new(); + for (k, value) in value.table(&lib_name)?.0 { + pairs.push((k,value)); + } + pairs.sort_by_key( |p| p.0 ); + for (k,value) in pairs{ + let key = format!("{}.{}", key, k); + match &k[..] 
{ + "rustc-flags" => { + let (flags, definition) = value.string(k)?; + let whence = format!("in `{}` (in {})", key, + definition.display()); + let (paths, links) = + BuildOutput::parse_rustc_flags(flags, &whence) + ?; + output.library_paths.extend(paths); + output.library_links.extend(links); + } + "rustc-link-lib" => { + let list = value.list(k)?; + output.library_links.extend(list.iter() + .map(|v| v.0.clone())); + } + "rustc-link-search" => { + let list = value.list(k)?; + output.library_paths.extend(list.iter().map(|v| { + PathBuf::from(&v.0) + })); + } + "rustc-cfg" => { + let list = value.list(k)?; + output.cfgs.extend(list.iter().map(|v| v.0.clone())); + } + "rustc-env" => { + for (name, val) in value.table(k)?.0 { + let val = val.string(name)?.0; + output.env.push((name.clone(), val.to_string())); + } + } + "warning" | + "rerun-if-changed" | + "rerun-if-env-changed" => { + bail!("`{}` is not supported in build script overrides", k); + } + _ => { + let val = value.string(k)?.0; + output.metadata.push((k.clone(), val.to_string())); + } + } + } + ret.overrides.insert(lib_name, output); + } + + Ok(ret) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_doc.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_doc.rs new file mode 100644 index 000000000..d4d562036 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_doc.rs @@ -0,0 +1,149 @@ +use std::collections::HashMap; +use std::fs; +use std::path::Path; +use std::process::Command; + +use core::Workspace; +use ops; +use util::CargoResult; + +pub struct DocOptions<'a> { + pub open_result: bool, + pub compile_opts: ops::CompileOptions<'a>, +} + +pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> { + let specs = options.compile_opts.spec.into_package_id_specs(ws)?; + let resolve = ops::resolve_ws_precisely(ws, + None, + options.compile_opts.features, + options.compile_opts.all_features, + options.compile_opts.no_default_features, + &specs)?; + let (packages, resolve_with_overrides) = resolve; + + if specs.is_empty() { + return Err(format!("manifest path `{}` contains no package: The manifest is virtual, \ + and the workspace has no members.", ws.current_manifest().display()).into()); + }; + + let pkgs = specs.iter().map(|p| { + let pkgid = p.query(resolve_with_overrides.iter())?; + packages.get(pkgid) + }).collect::>>()?; + + let mut lib_names = HashMap::new(); + let mut bin_names = HashMap::new(); + for package in &pkgs { + for target in package.targets().iter().filter(|t| t.documented()) { + if target.is_lib() { + if let Some(prev) = lib_names.insert(target.crate_name(), package) { + bail!("The library `{}` is specified by packages `{}` and \ + `{}` but can only be documented once. Consider renaming \ + or marking one of the targets as `doc = false`.", + target.crate_name(), prev, package); + } + } else { + if let Some(prev) = bin_names.insert(target.crate_name(), package) { + bail!("The binary `{}` is specified by packages `{}` and \ + `{}` but can be documented only once. Consider renaming \ + or marking one of the targets as `doc = false`.", + target.crate_name(), prev, package); + } + } + } + for (bin, bin_package) in bin_names.iter() { + if let Some(lib_package) = lib_names.get(bin) { + bail!("The target `{}` is specified as a library {}. It can be \ + documented only once. 
Consider renaming or marking one \ + of the targets as `doc = false`.", + bin, + if lib_package == bin_package { + format!("and as a binary by package `{}`", lib_package) + } else { + format!("by package `{}` and as a binary by \ + package `{}`", lib_package, bin_package) + }); + } + } + } + + ops::compile(ws, &options.compile_opts)?; + + if options.open_result { + let name = if pkgs.len() > 1 { + bail!("Passing multiple packages and `open` is not supported") + } else if pkgs.len() == 1 { + pkgs[0].name().replace("-", "_") + } else { + match lib_names.keys().chain(bin_names.keys()).nth(0) { + Some(s) => s.to_string(), + None => return Ok(()), + } + }; + + // Don't bother locking here as if this is getting deleted there's + // nothing we can do about it and otherwise if it's getting overwritten + // then that's also ok! + let mut target_dir = ws.target_dir(); + if let Some(triple) = options.compile_opts.target { + target_dir.push(Path::new(triple).file_stem().unwrap()); + } + let path = target_dir.join("doc").join(&name).join("index.html"); + let path = path.into_path_unlocked(); + if fs::metadata(&path).is_ok() { + let mut shell = options.compile_opts.config.shell(); + shell.status("Opening", path.display())?; + match open_docs(&path) { + Ok(m) => shell.status("Launching", m)?, + Err(e) => { + shell.warn( + "warning: could not determine a browser to open docs with, tried:")?; + for method in e { + shell.warn(format!("\t{}", method))?; + } + } + } + } + } + + Ok(()) +} + +#[cfg(not(any(target_os = "windows", target_os = "macos")))] +fn open_docs(path: &Path) -> Result<&'static str, Vec<&'static str>> { + use std::env; + let mut methods = Vec::new(); + // trying $BROWSER + if let Ok(name) = env::var("BROWSER") { + match Command::new(name).arg(path).status() { + Ok(_) => return Ok("$BROWSER"), + Err(_) => methods.push("$BROWSER"), + } + } + + for m in ["xdg-open", "gnome-open", "kde-open"].iter() { + match Command::new(m).arg(path).status() { + Ok(_) => return Ok(m), + Err(_) => methods.push(m), + } + } + + Err(methods) +} + +#[cfg(target_os = "windows")] +fn open_docs(path: &Path) -> Result<&'static str, Vec<&'static str>> { + match Command::new("cmd").arg("/C").arg(path).status() { + Ok(_) => Ok("cmd /C"), + Err(_) => Err(vec!["cmd /C"]), + } +} + +#[cfg(target_os = "macos")] +fn open_docs(path: &Path) -> Result<&'static str, Vec<&'static str>> { + match Command::new("open").arg(path).status() { + Ok(_) => Ok("open"), + Err(_) => Err(vec!["open"]), + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_fetch.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_fetch.rs new file mode 100644 index 000000000..80dfdd085 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_fetch.rs @@ -0,0 +1,12 @@ +use core::{Resolve, PackageSet, Workspace}; +use ops; +use util::CargoResult; + +/// Executes `cargo fetch`. 
+pub fn fetch<'a>(ws: &Workspace<'a>) -> CargoResult<(Resolve, PackageSet<'a>)> { + let (packages, resolve) = ops::resolve_ws(ws)?; + for id in resolve.iter() { + packages.get(id)?; + } + Ok((resolve, packages)) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_generate_lockfile.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_generate_lockfile.rs new file mode 100644 index 000000000..d07ee9672 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_generate_lockfile.rs @@ -0,0 +1,184 @@ +use std::collections::{BTreeMap, HashSet}; + +use core::PackageId; +use core::registry::PackageRegistry; +use core::{Resolve, SourceId, Workspace}; +use core::resolver::Method; +use ops; +use util::config::Config; +use util::CargoResult; + +pub struct UpdateOptions<'a> { + pub config: &'a Config, + pub to_update: &'a [String], + pub precise: Option<&'a str>, + pub aggressive: bool, +} + +pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> { + let mut registry = PackageRegistry::new(ws.config())?; + let resolve = ops::resolve_with_previous(&mut registry, ws, + Method::Everything, + None, None, &[], true)?; + ops::write_pkg_lockfile(ws, &resolve)?; + Ok(()) +} + +pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) + -> CargoResult<()> { + + if opts.aggressive && opts.precise.is_some() { + bail!("cannot specify both aggressive and precise simultaneously") + } + + if ws.members().is_empty() { + bail!("you can't generate a lockfile for an empty workspace.") + } + + let previous_resolve = match ops::load_pkg_lockfile(ws)? { + Some(resolve) => resolve, + None => return generate_lockfile(ws), + }; + let mut registry = PackageRegistry::new(opts.config)?; + let mut to_avoid = HashSet::new(); + + if opts.to_update.is_empty() { + to_avoid.extend(previous_resolve.iter()); + } else { + let mut sources = Vec::new(); + for name in opts.to_update { + let dep = previous_resolve.query(name)?; + if opts.aggressive { + fill_with_deps(&previous_resolve, dep, &mut to_avoid, + &mut HashSet::new()); + } else { + to_avoid.insert(dep); + sources.push(match opts.precise { + Some(precise) => { + // TODO: see comment in `resolve.rs` as well, but this + // seems like a pretty hokey reason to single out + // the registry as well. + let precise = if dep.source_id().is_registry() { + format!("{}={}", dep.name(), precise) + } else { + precise.to_string() + }; + dep.source_id().clone().with_precise(Some(precise)) + } + None => { + dep.source_id().clone().with_precise(None) + } + }); + } + } + registry.add_sources(&sources)?; + } + + let resolve = ops::resolve_with_previous(&mut registry, + ws, + Method::Everything, + Some(&previous_resolve), + Some(&to_avoid), + &[], + true)?; + + // Summarize what is changing for the user. 
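The summary printed below boils down to a per-package set difference between the old and new lockfiles: a one-to-one swap reads as "Updating", everything else as "Removing" or "Adding". A reduced, std-only sketch with hypothetical data (the vendored code also handles git sources and multiple versions per name):

```rust
use std::collections::BTreeMap;

fn main() {
    let old: BTreeMap<&str, &str> =
        [("log", "0.3.8"), ("rand", "0.3.15")].into_iter().collect();
    let new: BTreeMap<&str, &str> =
        [("log", "0.3.9"), ("libc", "0.2.30")].into_iter().collect();

    for (name, old_v) in &old {
        match new.get(name) {
            Some(new_v) if new_v != old_v =>
                println!("Updating {} v{} -> v{}", name, old_v, new_v),
            Some(_) => {} // unchanged
            None => println!("Removing {} v{}", name, old_v),
        }
    }
    for (name, new_v) in &new {
        if !old.contains_key(name) {
            println!("Adding {} v{}", name, new_v);
        }
    }
}
```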
+ let print_change = |status: &str, msg: String| { + opts.config.shell().status(status, msg) + }; + for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) { + if removed.len() == 1 && added.len() == 1 { + let msg = if removed[0].source_id().is_git() { + format!("{} -> #{}", removed[0], + &added[0].source_id().precise().unwrap()[..8]) + } else { + format!("{} -> v{}", removed[0], added[0].version()) + }; + print_change("Updating", msg)?; + } else { + for package in removed.iter() { + print_change("Removing", format!("{}", package))?; + } + for package in added.iter() { + print_change("Adding", format!("{}", package))?; + } + } + } + + ops::write_pkg_lockfile(ws, &resolve)?; + return Ok(()); + + fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId, + set: &mut HashSet<&'a PackageId>, + visited: &mut HashSet<&'a PackageId>) { + if !visited.insert(dep) { + return + } + set.insert(dep); + for dep in resolve.deps(dep) { + fill_with_deps(resolve, dep, set, visited); + } + } + + fn compare_dependency_graphs<'a>(previous_resolve: &'a Resolve, + resolve: &'a Resolve) -> + Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> { + fn key(dep: &PackageId) -> (&str, &SourceId) { + (dep.name(), dep.source_id()) + } + + // Removes all package ids in `b` from `a`. Note that this is somewhat + // more complicated because the equality for source ids does not take + // precise versions into account (e.g. git shas), but we want to take + // that into account here. + fn vec_subtract<'a>(a: &[&'a PackageId], + b: &[&'a PackageId]) -> Vec<&'a PackageId> { + a.iter().filter(|a| { + // If this package id is not found in `b`, then it's definitely + // in the subtracted set + let i = match b.binary_search(a) { + Ok(i) => i, + Err(..) => return true, + }; + + // If we've found `a` in `b`, then we iterate over all instances + // (we know `b` is sorted) and see if they all have different + // precise versions. If so, then `a` isn't actually in `b` so + // we'll let it through. + // + // Note that we only check this for non-registry sources, + // however, as registries contain enough version information in + // the package id to disambiguate + if a.source_id().is_registry() { + return false + } + b[i..].iter().take_while(|b| a == b).all(|b| { + a.source_id().precise() != b.source_id().precise() + }) + }).cloned().collect() + } + + // Map (package name, package source) to (removed versions, added versions). 
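+        // e.g. the key ("log", crates.io) might map to ([log v0.3.8],
+        // [log v0.3.9]) once both resolves are walked (illustrative values).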
+        let mut changes = BTreeMap::new();
+        let empty = (Vec::new(), Vec::new());
+        for dep in previous_resolve.iter() {
+            changes.entry(key(dep)).or_insert(empty.clone()).0.push(dep);
+        }
+        for dep in resolve.iter() {
+            changes.entry(key(dep)).or_insert(empty.clone()).1.push(dep);
+        }
+
+        for v in changes.values_mut() {
+            let (ref mut old, ref mut new) = *v;
+            old.sort();
+            new.sort();
+            let removed = vec_subtract(old, new);
+            let added = vec_subtract(new, old);
+            *old = removed;
+            *new = added;
+        }
+        debug!("{:#?}", changes);
+
+        changes.into_iter().map(|(_, v)| v).collect()
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_install.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_install.rs
new file mode 100644
index 000000000..12fe51c44
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_install.rs
@@ -0,0 +1,633 @@
+use std::collections::btree_map::Entry;
+use std::collections::{BTreeMap, BTreeSet};
+use std::{env, fs};
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use semver::{Version, VersionReq};
+use tempdir::TempDir;
+use toml;
+
+use core::{SourceId, Source, Package, Dependency, PackageIdSpec};
+use core::{PackageId, Workspace};
+use ops::{self, CompileFilter, DefaultExecutor};
+use sources::{GitSource, PathSource, SourceConfigMap};
+use util::{Config, internal};
+use util::{Filesystem, FileLock};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
+
+#[derive(Deserialize, Serialize)]
+#[serde(untagged)]
+enum CrateListing {
+    V1(CrateListingV1),
+    Empty(Empty),
+}
+
+#[derive(Deserialize, Serialize)]
+#[serde(deny_unknown_fields)]
+struct Empty {}
+
+#[derive(Deserialize, Serialize)]
+struct CrateListingV1 {
+    v1: BTreeMap<PackageId, BTreeSet<String>>,
+}
+
+struct Transaction {
+    bins: Vec<PathBuf>,
+}
+
+impl Transaction {
+    fn success(mut self) {
+        self.bins.clear();
+    }
+}
+
+impl Drop for Transaction {
+    fn drop(&mut self) {
+        for bin in self.bins.iter() {
+            let _ = fs::remove_file(bin);
+        }
+    }
+}
+
+pub fn install(root: Option<&str>,
+               krates: Vec<&str>,
+               source_id: &SourceId,
+               vers: Option<&str>,
+               opts: &ops::CompileOptions,
+               force: bool) -> CargoResult<()> {
+    let root = resolve_root(root, opts.config)?;
+    let map = SourceConfigMap::new(opts.config)?;
+
+    let (installed_anything, scheduled_error) = if krates.len() <= 1 {
+        install_one(&root, &map, krates.into_iter().next(), source_id, vers, opts,
+                    force, true)?;
+        (true, false)
+    } else {
+        let mut succeeded = vec![];
+        let mut failed = vec![];
+        let mut first = true;
+        for krate in krates {
+            let root = root.clone();
+            let map = map.clone();
+            match install_one(&root, &map, Some(krate), source_id, vers,
+                              opts, force, first) {
+                Ok(()) => succeeded.push(krate),
+                Err(e) => {
+                    ::handle_error(e, &mut opts.config.shell());
+                    failed.push(krate)
+                }
+            }
+            first = false;
+        }
+
+        let mut summary = vec![];
+        if !succeeded.is_empty() {
+            summary.push(format!("Successfully installed {}!", succeeded.join(", ")));
+        }
+        if !failed.is_empty() {
+            summary.push(format!("Failed to install {} (see error(s) above).", failed.join(", ")));
+        }
+        if !succeeded.is_empty() || !failed.is_empty() {
+            opts.config.shell().status("\nSummary:", summary.join(" "))?;
+        }
+
+        (!succeeded.is_empty(), !failed.is_empty())
+    };
+
+    if installed_anything {
+        // Print a warning if this directory isn't in PATH, since the user
+        // won't be able to run the installed commands otherwise.
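+        // (Typically `dst` here is `$CARGO_HOME/bin`, e.g. `~/.cargo/bin`.)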
+ let dst = metadata(opts.config, &root)?.parent().join("bin"); + let path = env::var_os("PATH").unwrap_or_default(); + for path in env::split_paths(&path) { + if path == dst { + return Ok(()) + } + } + + opts.config.shell().warn(&format!("be sure to add `{}` to your PATH to be \ + able to run the installed binaries", + dst.display()))?; + } + + if scheduled_error { + bail!("some crates failed to install"); + } + + Ok(()) +} + +fn install_one(root: &Filesystem, + map: &SourceConfigMap, + krate: Option<&str>, + source_id: &SourceId, + vers: Option<&str>, + opts: &ops::CompileOptions, + force: bool, + is_first_install: bool) -> CargoResult<()> { + + let config = opts.config; + + let (pkg, source) = if source_id.is_git() { + select_pkg(GitSource::new(source_id, config)?, + krate, vers, config, is_first_install, + &mut |git| git.read_packages())? + } else if source_id.is_path() { + let path = source_id.url().to_file_path() + .map_err(|()| CargoError::from("path sources must have a valid path"))?; + let mut src = PathSource::new(&path, source_id, config); + src.update().chain_err(|| { + format!("`{}` is not a crate root; specify a crate to \ + install from crates.io, or use --path or --git to \ + specify an alternate source", path.display()) + })?; + select_pkg(PathSource::new(&path, source_id, config), + krate, vers, config, is_first_install, + &mut |path| path.read_packages())? + } else { + select_pkg(map.load(source_id)?, + krate, vers, config, is_first_install, + &mut |_| Err("must specify a crate to install from \ + crates.io, or use --path or --git to \ + specify alternate source".into()))? + }; + + let mut td_opt = None; + let overidden_target_dir = if source_id.is_path() { + None + } else if let Ok(td) = TempDir::new("cargo-install") { + let p = td.path().to_owned(); + td_opt = Some(td); + Some(Filesystem::new(p)) + } else { + Some(Filesystem::new(config.cwd().join("target-install"))) + }; + + let ws = match overidden_target_dir { + Some(dir) => Workspace::ephemeral(pkg, config, Some(dir), false)?, + None => Workspace::new(pkg.manifest_path(), config)?, + }; + let pkg = ws.current()?; + + config.shell().status("Installing", pkg)?; + + // Preflight checks to check up front whether we'll overwrite something. + // We have to check this again afterwards, but may as well avoid building + // anything if we're gonna throw it away anyway. 
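+    // (`metadata` also takes the advisory file lock on `.crates.toml`, so
+    // concurrent `cargo install` runs won't race on the listing; this relies
+    // on the `FileLock` returned by `open_rw`.)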
+    {
+        let metadata = metadata(config, root)?;
+        let list = read_crate_list(&metadata)?;
+        let dst = metadata.parent().join("bin");
+        check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
+    }
+
+    let compile = ops::compile_ws(&ws,
+                                  Some(source),
+                                  opts,
+                                  Arc::new(DefaultExecutor)).chain_err(|| {
+        if let Some(td) = td_opt.take() {
+            // preserve the temporary directory, so the user can inspect it
+            td.into_path();
+        }
+
+        CargoError::from(format!("failed to compile `{}`, intermediate artifacts can be \
+                                  found at `{}`", pkg, ws.target_dir().display()))
+    })?;
+    let binaries: Vec<(&str, &Path)> = compile.binaries.iter().map(|bin| {
+        let name = bin.file_name().unwrap();
+        if let Some(s) = name.to_str() {
+            Ok((s, bin.as_ref()))
+        } else {
+            bail!("Binary `{:?}` name can't be serialized into string", name)
+        }
+    }).collect::<CargoResult<_>>()?;
+    if binaries.is_empty() {
+        bail!("no binaries are available for install using the selected \
+               features");
+    }
+
+    let metadata = metadata(config, root)?;
+    let mut list = read_crate_list(&metadata)?;
+    let dst = metadata.parent().join("bin");
+    let duplicates = check_overwrites(&dst, pkg, &opts.filter,
+                                      &list, force)?;
+
+    fs::create_dir_all(&dst)?;
+
+    // Copy all binaries to a temporary directory under `dst` first, catching
+    // some failure modes (e.g. out of space) before touching the existing
+    // binaries. This directory will get cleaned up via RAII.
+    let staging_dir = TempDir::new_in(&dst, "cargo-install")?;
+    for &(bin, src) in binaries.iter() {
+        let dst = staging_dir.path().join(bin);
+        // Try to move if `target_dir` is transient.
+        if !source_id.is_path() && fs::rename(src, &dst).is_ok() {
+            continue
+        }
+        fs::copy(src, &dst).chain_err(|| {
+            format!("failed to copy `{}` to `{}`", src.display(),
+                    dst.display())
+        })?;
+    }
+
+    let (to_replace, to_install): (Vec<&str>, Vec<&str>) =
+        binaries.iter().map(|&(bin, _)| bin)
+                .partition(|&bin| duplicates.contains_key(bin));
+
+    let mut installed = Transaction { bins: Vec::new() };
+
+    // Move the temporary copies into `dst` starting with new binaries.
+    for bin in to_install.iter() {
+        let src = staging_dir.path().join(bin);
+        let dst = dst.join(bin);
+        config.shell().status("Installing", dst.display())?;
+        fs::rename(&src, &dst).chain_err(|| {
+            format!("failed to move `{}` to `{}`", src.display(),
+                    dst.display())
+        })?;
+        installed.bins.push(dst);
+    }
+
+    // Repeat for binaries which replace existing ones but don't pop the error
+    // up until after updating metadata.
+    let mut replaced_names = Vec::new();
+    let result = {
+        let mut try_install = || -> CargoResult<()> {
+            for &bin in to_replace.iter() {
+                let src = staging_dir.path().join(bin);
+                let dst = dst.join(bin);
+                config.shell().status("Replacing", dst.display())?;
+                fs::rename(&src, &dst).chain_err(|| {
+                    format!("failed to move `{}` to `{}`", src.display(),
+                            dst.display())
+                })?;
+                replaced_names.push(bin);
+            }
+            Ok(())
+        };
+        try_install()
+    };
+
+    // Update records of replaced binaries.
+    for &bin in replaced_names.iter() {
+        if let Some(&Some(ref p)) = duplicates.get(bin) {
+            if let Some(set) = list.v1.get_mut(p) {
+                set.remove(bin);
+            }
+        }
+        list.v1.entry(pkg.package_id().clone())
+               .or_insert_with(|| BTreeSet::new())
+               .insert(bin.to_string());
+    }
+
+    // Remove empty metadata lines.
+    let pkgs = list.v1.iter()
+                      .filter_map(|(p, set)| if set.is_empty() { Some(p.clone()) } else { None })
+                      .collect::<Vec<_>>();
+    for p in pkgs.iter() {
+        list.v1.remove(p);
+    }
+
+    // If installation was successful record newly installed binaries.
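+    // (Binaries that replaced existing ones were already recorded above,
+    // since their on-disk state changed even if a later step fails.)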
+    if result.is_ok() {
+        list.v1.entry(pkg.package_id().clone())
+               .or_insert_with(|| BTreeSet::new())
+               .extend(to_install.iter().map(|s| s.to_string()));
+    }
+
+    let write_result = write_crate_list(&metadata, list);
+    match write_result {
+        // Replacement error (if any) isn't actually caused by write error
+        // but this seems to be the only way to show both.
+        Err(err) => result.chain_err(|| err)?,
+        Ok(_) => result?,
+    }
+
+    // Reaching here means all actions have succeeded. Clean up.
+    installed.success();
+    if !source_id.is_path() {
+        // Don't bother grabbing a lock as we're going to blow it all away
+        // anyway.
+        let target_dir = ws.target_dir().into_path_unlocked();
+        fs::remove_dir_all(&target_dir)?;
+    }
+
+    Ok(())
+}
+
+fn select_pkg<'a, T>(mut source: T,
+                     name: Option<&str>,
+                     vers: Option<&str>,
+                     config: &Config,
+                     needs_update: bool,
+                     list_all: &mut FnMut(&mut T) -> CargoResult<Vec<Package>>)
+                     -> CargoResult<(Package, Box<Source + 'a>)>
+    where T: Source + 'a
+{
+    if needs_update {
+        source.update()?;
+    }
+
+    match name {
+        Some(name) => {
+            let vers = match vers {
+                Some(v) => {
+
+                    // If the version begins with character <, >, =, ^, ~ parse it as a
+                    // version range, otherwise parse it as a specific version
+                    let first = v.chars()
+                        .nth(0)
+                        .ok_or("no version provided for the `--vers` flag")?;
+
+                    match first {
+                        '<' | '>' | '=' | '^' | '~' => match v.parse::<VersionReq>() {
+                            Ok(v) => Some(v.to_string()),
+                            Err(_) => {
+                                let msg = format!("the `--vers` provided, `{}`, is \
+                                                   not a valid semver version requirement\n\n
+                                                   Please have a look at \
+                                                   http://doc.crates.io/specifying-dependencies.html \
+                                                   for the correct format", v);
+                                return Err(msg.into());
+                            }
+                        },
+                        _ => match v.parse::<Version>() {
+                            Ok(v) => Some(format!("={}", v)),
+                            Err(_) => {
+                                let mut msg = format!("the `--vers` provided, `{}`, is \
+                                                       not a valid semver version\n\n\
+                                                       historically Cargo treated this \
+                                                       as a semver version requirement \
+                                                       accidentally\nand will continue \
+                                                       to do so, but this behavior \
+                                                       will be removed eventually", v);
+
+                                // If it is not a valid version but it is a valid version
+                                // requirement, add a note to the warning
+                                if v.parse::<VersionReq>().is_ok() {
+                                    msg.push_str(&format!("\nif you want to specify semver range, \
+                                                           add an explicit qualifier, like ^{}", v));
+                                }
+                                config.shell().warn(&msg)?;
+                                Some(v.to_string())
+                            }
+                        }
+                    }
+                }
+                None => None,
+            };
+            let vers = vers.as_ref().map(|s| &**s);
+            let dep = Dependency::parse_no_deprecated(name, vers, source.source_id())?;
+            let deps = source.query_vec(&dep)?;
+            match deps.iter().map(|p| p.package_id()).max() {
+                Some(pkgid) => {
+                    let pkg = source.download(pkgid)?;
+                    Ok((pkg, Box::new(source)))
+                }
+                None => {
+                    let vers_info = vers.map(|v| format!(" with version `{}`", v))
+                                        .unwrap_or_default();
+                    Err(format!("could not find `{}` in `{}`{}", name,
+                                source.source_id(), vers_info).into())
+                }
+            }
+        }
+        None => {
+            let candidates = list_all(&mut source)?;
+            let binaries = candidates.iter().filter(|cand| {
+                cand.targets().iter().filter(|t| t.is_bin()).count() > 0
+            });
+            let examples = candidates.iter().filter(|cand| {
+                cand.targets().iter().filter(|t| t.is_example()).count() > 0
+            });
+            let pkg = match one(binaries, |v| multi_err("binaries", v))? {
+                Some(p) => p,
+                None => {
+                    match one(examples, |v| multi_err("examples", v))? {
+                        Some(p) => p,
+                        None => bail!("no packages found with binaries or \
+                                       examples"),
+                    }
+                }
+            };
+            return Ok((pkg.clone(), Box::new(source)));
+
+            fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String {
+                pkgs.sort_by(|a, b| a.name().cmp(b.name()));
+                format!("multiple packages with {} found: {}", kind,
+                        pkgs.iter().map(|p| p.name()).collect::<Vec<_>>()
+                            .join(", "))
+            }
+        }
+    }
+}
+
+fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
+    where I: Iterator,
+          F: FnOnce(Vec<I::Item>) -> String
+{
+    match (i.next(), i.next()) {
+        (Some(i1), Some(i2)) => {
+            let mut v = vec![i1, i2];
+            v.extend(i);
+            Err(f(v).into())
+        }
+        (Some(i), None) => Ok(Some(i)),
+        (None, _) => Ok(None)
+    }
+}
+
+fn check_overwrites(dst: &Path,
+                    pkg: &Package,
+                    filter: &ops::CompileFilter,
+                    prev: &CrateListingV1,
+                    force: bool) -> CargoResult<BTreeMap<String, Option<PackageId>>> {
+    // If explicit --bin or --example flags were passed then those'll
+    // get checked during cargo_compile, we only care about the "build
+    // everything" case here
+    if !filter.is_specific() && !pkg.targets().iter().any(|t| t.is_bin()) {
+        bail!("specified package has no binaries")
+    }
+    let duplicates = find_duplicates(dst, pkg, filter, prev);
+    if force || duplicates.is_empty() {
+        return Ok(duplicates)
+    }
+    // Format the error message.
+    let mut msg = String::new();
+    for (bin, p) in duplicates.iter() {
+        msg.push_str(&format!("binary `{}` already exists in destination", bin));
+        if let Some(p) = p.as_ref() {
+            msg.push_str(&format!(" as part of `{}`\n", p));
+        } else {
+            msg.push_str("\n");
+        }
+    }
+    msg.push_str("Add --force to overwrite");
+    Err(msg.into())
+}
+
+fn find_duplicates(dst: &Path,
+                   pkg: &Package,
+                   filter: &ops::CompileFilter,
+                   prev: &CrateListingV1) -> BTreeMap<String, Option<PackageId>> {
+    let check = |name: String| {
+        // Need to provide type, works around Rust Issue #93349
+        let name = format!("{}{}", name, env::consts::EXE_SUFFIX);
+        if fs::metadata(dst.join(&name)).is_err() {
+            None
+        } else if let Some((p, _)) = prev.v1.iter().find(|&(_, v)| v.contains(&name)) {
+            Some((name, Some(p.clone())))
+        } else {
+            Some((name, None))
+        }
+    };
+    match *filter {
+        CompileFilter::Default { .. } => {
+            pkg.targets().iter()
+                         .filter(|t| t.is_bin())
+                         .filter_map(|t| check(t.name().to_string()))
+                         .collect()
+        }
+        CompileFilter::Only { bins, examples, .. } => {
+            let all_bins: Vec<String> = bins.try_collect().unwrap_or_else(|| {
+                pkg.targets().iter().filter(|t| t.is_bin())
+                             .map(|t| t.name().to_string())
+                             .collect()
+            });
+            let all_examples: Vec<String> = examples.try_collect().unwrap_or_else(|| {
+                pkg.targets().iter().filter(|t| t.is_bin_example())
+                             .map(|t| t.name().to_string())
+                             .collect()
+            });
+
+            all_bins.iter().chain(all_examples.iter())
+                    .filter_map(|t| check(t.clone()))
+                    .collect::<BTreeMap<String, Option<PackageId>>>()
+        }
+    }
+}
+
+fn read_crate_list(file: &FileLock) -> CargoResult<CrateListingV1> {
+    (|| -> CargoResult<_> {
+        let mut contents = String::new();
+        file.file().read_to_string(&mut contents)?;
+        let listing = toml::from_str(&contents).chain_err(|| {
+            internal("invalid TOML found for metadata")
+        })?;
+        match listing {
+            CrateListing::V1(v1) => Ok(v1),
+            CrateListing::Empty(_) => {
+                Ok(CrateListingV1 { v1: BTreeMap::new() })
+            }
+        }
+    })().chain_err(|| {
+        format!("failed to parse crate metadata at `{}`",
+                file.path().to_string_lossy())
+    })
+}
+
+fn write_crate_list(file: &FileLock, listing: CrateListingV1) -> CargoResult<()> {
+    (|| -> CargoResult<_> {
+        let mut file = file.file();
+        file.seek(SeekFrom::Start(0))?;
+        file.set_len(0)?;
+        let data = toml::to_string(&CrateListing::V1(listing))?;
+        file.write_all(data.as_bytes())?;
+        Ok(())
+    })().chain_err(|| {
+        format!("failed to write crate metadata at `{}`",
+                file.path().to_string_lossy())
+    })
+}
+
+pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {
+    let dst = resolve_root(dst, config)?;
+    let dst = metadata(config, &dst)?;
+    let list = read_crate_list(&dst)?;
+    for (k, v) in list.v1.iter() {
+        println!("{}:", k);
+        for bin in v {
+            println!("    {}", bin);
+        }
+    }
+    Ok(())
+}
+
+pub fn uninstall(root: Option<&str>,
+                 spec: &str,
+                 bins: &[String],
+                 config: &Config) -> CargoResult<()> {
+    let root = resolve_root(root, config)?;
+    let crate_metadata = metadata(config, &root)?;
+    let mut metadata = read_crate_list(&crate_metadata)?;
+    let mut to_remove = Vec::new();
+    {
+        let result = PackageIdSpec::query_str(spec, metadata.v1.keys())?
+                                   .clone();
+        let mut installed = match metadata.v1.entry(result.clone()) {
+            Entry::Occupied(e) => e,
+            Entry::Vacant(..) => panic!("entry not found: {}", result),
+        };
+        let dst = crate_metadata.parent().join("bin");
+        for bin in installed.get() {
+            let bin = dst.join(bin);
+            if fs::metadata(&bin).is_err() {
+                bail!("corrupt metadata, `{}` does not exist when it should",
+                      bin.display())
+            }
+        }
+
+        let bins = bins.iter().map(|s| {
+            if s.ends_with(env::consts::EXE_SUFFIX) {
+                s.to_string()
+            } else {
+                format!("{}{}", s, env::consts::EXE_SUFFIX)
+            }
+        }).collect::<Vec<_>>();
+
+        for bin in bins.iter() {
+            if !installed.get().contains(bin) {
+                bail!("binary `{}` not installed as part of `{}`", bin, result)
+            }
+        }
+
+        if bins.is_empty() {
+            to_remove.extend(installed.get().iter().map(|b| dst.join(b)));
+            installed.get_mut().clear();
+        } else {
+            for bin in bins.iter() {
+                to_remove.push(dst.join(bin));
+                installed.get_mut().remove(bin);
+            }
+        }
+        if installed.get().is_empty() {
+            installed.remove();
+        }
+    }
+    write_crate_list(&crate_metadata, metadata)?;
+    for bin in to_remove {
+        config.shell().status("Removing", bin.display())?;
+        fs::remove_file(bin)?;
+    }
+
+    Ok(())
+}
+
+fn metadata(config: &Config, root: &Filesystem) -> CargoResult<FileLock> {
+    root.open_rw(Path::new(".crates.toml"), config, "crate metadata")
+}
+
+fn resolve_root(flag: Option<&str>,
+                config: &Config) -> CargoResult<Filesystem> {
+    let config_root = config.get_path("install.root")?;
+    Ok(flag.map(PathBuf::from).or_else(|| {
+        env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from)
+    }).or_else(move || {
+        config_root.map(|v| v.val)
+    }).map(Filesystem::new).unwrap_or_else(|| {
+        config.home().clone()
+    }))
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_new.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_new.rs
new file mode 100644
index 000000000..9c597df85
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_new.rs
@@ -0,0 +1,607 @@
+use std::collections::BTreeMap;
+use std::env;
+use std::fs;
+use std::path::Path;
+
+use serde::{Deserialize, Deserializer};
+use serde::de;
+
+use git2::Config as GitConfig;
+use git2::Repository as GitRepository;
+
+use core::Workspace;
+use ops::is_bad_artifact_name;
+use util::{GitRepo, HgRepo, PijulRepo, FossilRepo, internal};
+use util::{Config, paths};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
+
+use toml;
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum VersionControl { Git, Hg, Pijul, Fossil, NoVcs }
+
+pub struct NewOptions<'a> {
+    pub version_control: Option<VersionControl>,
+    pub bin: bool,
+    pub lib: bool,
+    pub path: &'a str,
+    pub name: Option<&'a str>,
+}
+
+struct SourceFileInformation {
+    relative_path: String,
+    target_name: String,
+    bin: bool,
+}
+
+struct MkOptions<'a> {
+    version_control: Option<VersionControl>,
+    path: &'a Path,
+    name: &'a str,
+    source_files: Vec<SourceFileInformation>,
+    bin: bool,
+}
+
+impl<'de> Deserialize<'de> for VersionControl {
+    fn deserialize<D: Deserializer<'de>>(d: D) -> Result<VersionControl, D::Error> {
+        Ok(match &String::deserialize(d)?[..] {
+            "git" => VersionControl::Git,
+            "hg" => VersionControl::Hg,
+            "pijul" => VersionControl::Pijul,
+            "fossil" => VersionControl::Fossil,
+            "none" => VersionControl::NoVcs,
+            n => {
+                let value = de::Unexpected::Str(n);
+                let msg = "unsupported version control system";
+                return Err(de::Error::invalid_value(value, &msg));
+            }
+        })
+    }
+}
+
+impl<'a> NewOptions<'a> {
+    pub fn new(version_control: Option<VersionControl>,
+               bin: bool,
+               lib: bool,
+               path: &'a str,
+               name: Option<&'a str>) -> NewOptions<'a> {
+
+        // default to lib
+        let is_lib = if !bin {
+            true
+        }
+        else {
+            lib
+        };
+
+        NewOptions {
+            version_control: version_control,
+            bin: bin,
+            lib: is_lib,
+            path: path,
+            name: name,
+        }
+    }
+}
+
+struct CargoNewConfig {
+    name: Option<String>,
+    email: Option<String>,
+    version_control: Option<VersionControl>,
+}
+
+fn get_name<'a>(path: &'a Path, opts: &'a NewOptions, config: &Config) -> CargoResult<&'a str> {
+    if let Some(name) = opts.name {
+        return Ok(name);
+    }
+
+    if path.file_name().is_none() {
+        bail!("cannot auto-detect project name from path {:?} ; use --name to override",
+              path.as_os_str());
+    }
+
+    let dir_name = path.file_name().and_then(|s| s.to_str()).ok_or_else(|| {
+        CargoError::from(format!("cannot create a project with a non-unicode name: {:?}",
+                                 path.file_name().unwrap()))
+    })?;
+
+    if opts.bin {
+        Ok(dir_name)
+    } else {
+        let new_name = strip_rust_affixes(dir_name);
+        if new_name != dir_name {
+            writeln!(config.shell().err(),
+                     "note: package will be named `{}`; use --name to override",
+                     new_name)?;
+        }
+        Ok(new_name)
+    }
+}
+
+fn check_name(name: &str, is_bin: bool) -> CargoResult<()> {
+
+    // Ban keywords + test list found at
+    // https://doc.rust-lang.org/grammar.html#keywords
+    let blacklist = ["abstract", "alignof", "as", "become", "box",
+                     "break", "const", "continue", "crate", "do",
+                     "else", "enum", "extern", "false", "final",
+                     "fn", "for", "if", "impl", "in",
+                     "let", "loop", "macro", "match", "mod",
+                     "move", "mut", "offsetof", "override", "priv",
+                     "proc", "pub", "pure", "ref", "return",
+                     "self", "sizeof", "static", "struct",
+                     "super", "test", "trait", "true", "type", "typeof",
+                     "unsafe", "unsized", "use", "virtual", "where",
+                     "while", "yield"];
+    if blacklist.contains(&name) || (is_bin && is_bad_artifact_name(name)) {
+        bail!("The name `{}` cannot be used as a crate name\n\
+               use --name to override crate name",
+              name)
+    }
+
+    if let Some(ref c) = name.chars().nth(0) {
+        if c.is_digit(10) {
+            bail!("Package names starting with a digit cannot be used as a crate name\n\
+                   use --name to override crate name")
+        }
+    }
+
+    for c in name.chars() {
+        if c.is_alphanumeric() { continue }
+        if c == '_' || c == '-' { continue }
+        bail!("Invalid character `{}` in crate name: `{}`\n\
+               use --name to override crate name",
+              c, name)
+    }
+    Ok(())
+}
+
+fn detect_source_paths_and_types(project_path : &Path,
+                                 project_name: &str,
+                                 detected_files: &mut Vec<SourceFileInformation>,
+                                 ) -> CargoResult<()> {
+    let path = project_path;
+    let name = project_name;
+
+    enum H {
+        Bin,
+        Lib,
+        Detect,
+    }
+
+    struct Test {
+        proposed_path: String,
+        handling: H,
+    }
+
+    let tests = vec![
+        Test { proposed_path: format!("src/main.rs"),     handling: H::Bin },
+        Test { proposed_path: format!("main.rs"),         handling: H::Bin },
+        Test { proposed_path: format!("src/{}.rs", name), handling: H::Detect },
+        Test { proposed_path: format!("{}.rs", name),     handling: H::Detect },
+        Test { proposed_path: format!("src/lib.rs"),      handling: H::Lib },
+        Test { proposed_path: format!("lib.rs"),          handling: H::Lib },
+    ];
+
+    for i in tests {
+        let pp =
i.proposed_path; + + // path/pp does not exist or is not a file + if !fs::metadata(&path.join(&pp)).map(|x| x.is_file()).unwrap_or(false) { + continue; + } + + let sfi = match i.handling { + H::Bin => { + SourceFileInformation { + relative_path: pp, + target_name: project_name.to_string(), + bin: true + } + } + H::Lib => { + SourceFileInformation { + relative_path: pp, + target_name: project_name.to_string(), + bin: false + } + } + H::Detect => { + let content = paths::read(&path.join(pp.clone()))?; + let isbin = content.contains("fn main"); + SourceFileInformation { + relative_path: pp, + target_name: project_name.to_string(), + bin: isbin + } + } + }; + detected_files.push(sfi); + } + + // Check for duplicate lib attempt + + let mut previous_lib_relpath : Option<&str> = None; + let mut duplicates_checker : BTreeMap<&str, &SourceFileInformation> = BTreeMap::new(); + + for i in detected_files { + if i.bin { + if let Some(x) = BTreeMap::get::(&duplicates_checker, i.target_name.as_ref()) { + bail!("\ +multiple possible binary sources found: + {} + {} +cannot automatically generate Cargo.toml as the main target would be ambiguous", + &x.relative_path, &i.relative_path); + } + duplicates_checker.insert(i.target_name.as_ref(), i); + } else { + if let Some(plp) = previous_lib_relpath { + return Err(format!("cannot have a project with \ + multiple libraries, \ + found both `{}` and `{}`", + plp, i.relative_path).into()); + } + previous_lib_relpath = Some(&i.relative_path); + } + } + + Ok(()) +} + +fn plan_new_source_file(bin: bool, project_name: String) -> SourceFileInformation { + if bin { + SourceFileInformation { + relative_path: "src/main.rs".to_string(), + target_name: project_name, + bin: true, + } + } else { + SourceFileInformation { + relative_path: "src/lib.rs".to_string(), + target_name: project_name, + bin: false, + } + } +} + +pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { + let path = config.cwd().join(opts.path); + if fs::metadata(&path).is_ok() { + bail!("destination `{}` already exists\n\n\ + Use `cargo init` to initialize the directory\ + ", path.display() + ) + } + + if opts.lib && opts.bin { + bail!("can't specify both lib and binary outputs") + } + + let name = get_name(&path, opts, config)?; + check_name(name, opts.bin)?; + + let mkopts = MkOptions { + version_control: opts.version_control, + path: &path, + name: name, + source_files: vec![plan_new_source_file(opts.bin, name.to_string())], + bin: opts.bin, + }; + + mk(config, &mkopts).chain_err(|| { + format!("Failed to create project `{}` at `{}`", + name, path.display()) + }) +} + +pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> { + let path = config.cwd().join(opts.path); + + let cargotoml_path = path.join("Cargo.toml"); + if fs::metadata(&cargotoml_path).is_ok() { + bail!("`cargo init` cannot be run on existing Cargo projects") + } + + if opts.lib && opts.bin { + bail!("can't specify both lib and binary outputs"); + } + + let name = get_name(&path, opts, config)?; + check_name(name, opts.bin)?; + + let mut src_paths_types = vec![]; + + detect_source_paths_and_types(&path, name, &mut src_paths_types)?; + + if src_paths_types.is_empty() { + src_paths_types.push(plan_new_source_file(opts.bin, name.to_string())); + } else { + // --bin option may be ignored if lib.rs or src/lib.rs present + // Maybe when doing `cargo init --bin` inside a library project stub, + // user may mean "initialize for library, but also add binary target" + } + + let mut version_control = opts.version_control; + 
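+    // Probe for an existing repository when --vcs wasn't passed; finding
+    // more than one marker directory (e.g. both `.git` and `.hg`) is
+    // ambiguous, so we bail out below rather than guess.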
+ if version_control == None { + let mut num_detected_vsces = 0; + + if fs::metadata(&path.join(".git")).is_ok() { + version_control = Some(VersionControl::Git); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".hg")).is_ok() { + version_control = Some(VersionControl::Hg); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".pijul")).is_ok() { + version_control = Some(VersionControl::Pijul); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".fossil")).is_ok() { + version_control = Some(VersionControl::Fossil); + num_detected_vsces += 1; + } + + // if none exists, maybe create git, like in `cargo new` + + if num_detected_vsces > 1 { + bail!("more than one of .hg, .git, .pijul, .fossil configurations \ + found and the ignore file can't be filled in as \ + a result. specify --vcs to override detection"); + } + } + + let mkopts = MkOptions { + version_control: version_control, + path: &path, + name: name, + bin: src_paths_types.iter().any(|x|x.bin), + source_files: src_paths_types, + }; + + mk(config, &mkopts).chain_err(|| { + format!("Failed to create project `{}` at `{}`", + name, path.display()) + }) +} + +fn strip_rust_affixes(name: &str) -> &str { + for &prefix in &["rust-", "rust_", "rs-", "rs_"] { + if name.starts_with(prefix) { + return &name[prefix.len()..]; + } + } + for &suffix in &["-rust", "_rust", "-rs", "_rs"] { + if name.ends_with(suffix) { + return &name[..name.len()-suffix.len()]; + } + } + name +} + +fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool { + GitRepo::discover(path, cwd).is_ok() || HgRepo::discover(path, cwd).is_ok() +} + +fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> { + let path = opts.path; + let name = opts.name; + let cfg = global_config(config)?; + // Please ensure that ignore and hgignore are in sync. + let ignore = ["/target/\n", "**/*.rs.bk\n", + if !opts.bin { "Cargo.lock\n" } else { "" }] + .concat(); + // Mercurial glob ignores can't be rooted, so just sticking a 'syntax: glob' at the top of the + // file will exclude too much. Instead, use regexp-based ignores. See 'hg help ignore' for + // more. + let hgignore = ["^target/\n", "glob:*.rs.bk\n", + if !opts.bin { "glob:Cargo.lock\n" } else { "" }] + .concat(); + + let in_existing_vcs_repo = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd()); + let vcs = match (opts.version_control, cfg.version_control, in_existing_vcs_repo) { + (None, None, false) => VersionControl::Git, + (None, Some(option), false) => option, + (Some(option), _, _) => option, + (_, _, true) => VersionControl::NoVcs, + }; + match vcs { + VersionControl::Git => { + if !fs::metadata(&path.join(".git")).is_ok() { + GitRepo::init(path, config.cwd())?; + } + paths::append(&path.join(".gitignore"), ignore.as_bytes())?; + }, + VersionControl::Hg => { + if !fs::metadata(&path.join(".hg")).is_ok() { + HgRepo::init(path, config.cwd())?; + } + paths::append(&path.join(".hgignore"), hgignore.as_bytes())?; + }, + VersionControl::Pijul => { + if !fs::metadata(&path.join(".pijul")).is_ok() { + PijulRepo::init(path, config.cwd())?; + } + }, + VersionControl::Fossil => { + if !fs::metadata(&path.join(".fossil")).is_ok() { + FossilRepo::init(path, config.cwd())?; + } + }, + VersionControl::NoVcs => { + fs::create_dir_all(path)?; + }, + }; + + let (author_name, email) = discover_author()?; + // Hoo boy, sure glad we've got exhaustiveness checking behind us. 
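+    // Explicit `[cargo-new]` configuration wins; otherwise fall back to what
+    // discover_author() found. For example ("Jane Doe", Some("jane@example.com"))
+    // becomes "Jane Doe <jane@example.com>" (illustrative values).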
+    let author = match (cfg.name, cfg.email, author_name, email) {
+        (Some(name), Some(email), _, _) |
+        (Some(name), None, _, Some(email)) |
+        (None, Some(email), name, _) |
+        (None, None, name, Some(email)) => format!("{} <{}>", name, email),
+        (Some(name), None, _, None) |
+        (None, None, name, None) => name,
+    };
+
+    let mut cargotoml_path_specifier = String::new();
+
+    // Calculate which [lib] and [[bin]] sections we need to append to Cargo.toml.
+
+    for i in &opts.source_files {
+        if i.bin {
+            if i.relative_path != "src/main.rs" {
+                cargotoml_path_specifier.push_str(&format!(r#"
+[[bin]]
+name = "{}"
+path = {}
+"#, i.target_name, toml::Value::String(i.relative_path.clone())));
+            }
+        } else if i.relative_path != "src/lib.rs" {
+            cargotoml_path_specifier.push_str(&format!(r#"
+[lib]
+name = "{}"
+path = {}
+"#, i.target_name, toml::Value::String(i.relative_path.clone())));
+        }
+    }
+
+    // Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed
+
+    paths::write(&path.join("Cargo.toml"), format!(
+r#"[package]
+name = "{}"
+version = "0.1.0"
+authors = [{}]
+
+[dependencies]
+{}"#, name, toml::Value::String(author), cargotoml_path_specifier).as_bytes())?;
+
+
+    // Create all specified source files
+    // (with respective parent directories)
+    // if they don't exist
+
+    for i in &opts.source_files {
+        let path_of_source_file = path.join(i.relative_path.clone());
+
+        if let Some(src_dir) = path_of_source_file.parent() {
+            fs::create_dir_all(src_dir)?;
+        }
+
+        let default_file_content : &[u8] = if i.bin {
+            b"\
+fn main() {
+    println!(\"Hello, world!\");
+}
+"
+        } else {
+            b"\
+#[cfg(test)]
+mod tests {
+    #[test]
+    fn it_works() {
+        assert_eq!(2 + 2, 4);
+    }
+}
+"
+        };
+
+        if !fs::metadata(&path_of_source_file).map(|x| x.is_file()).unwrap_or(false) {
+            paths::write(&path_of_source_file, default_file_content)?;
+        }
+    }
+
+    if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
+        let msg = format!("compiling this new crate may not work due to invalid \
+                           workspace configuration\n\n{}", e);
+        config.shell().warn(msg)?;
+    }
+
+    Ok(())
+}
+
+fn get_environment_variable(variables: &[&str]) -> Option<String> {
+    variables.iter()
+             .filter_map(|var| env::var(var).ok())
+             .next()
+}
+
+fn discover_author() -> CargoResult<(String, Option<String>)> {
+    let cwd = env::current_dir()?;
+    let git_config = if let Ok(repo) = GitRepository::discover(&cwd) {
+        repo.config().ok().or_else(|| GitConfig::open_default().ok())
+    } else {
+        GitConfig::open_default().ok()
+    };
+    let git_config = git_config.as_ref();
+    let name_variables = ["CARGO_NAME", "GIT_AUTHOR_NAME", "GIT_COMMITTER_NAME",
+                          "USER", "USERNAME", "NAME"];
+    let name = get_environment_variable(&name_variables[0..3])
+        .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok()))
+        .or_else(|| get_environment_variable(&name_variables[3..]));
+
+    let name = match name {
+        Some(name) => name,
+        None => {
+            let username_var = if cfg!(windows) {"USERNAME"} else {"USER"};
+            bail!("could not determine the current user, please set ${}",
+                  username_var)
+        }
+    };
+    let email_variables = ["CARGO_EMAIL", "GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL",
+                           "EMAIL"];
+    let email = get_environment_variable(&email_variables[0..3])
+        .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok()))
+        .or_else(|| get_environment_variable(&email_variables[3..]));
+
+    let name = name.trim().to_string();
+    let email = email.map(|s| s.trim().to_string());
+
+    Ok((name, email))
+}
+
+fn global_config(config: &Config) -> CargoResult<CargoNewConfig> {
+    let name = config.get_string("cargo-new.name")?.map(|s| s.val);
+    let email = config.get_string("cargo-new.email")?.map(|s| s.val);
+    let vcs = config.get_string("cargo-new.vcs")?;
+
+    let vcs = match vcs.as_ref().map(|p| (&p.val[..], &p.definition)) {
+        Some(("git", _)) => Some(VersionControl::Git),
+        Some(("hg", _)) => Some(VersionControl::Hg),
+        Some(("none", _)) => Some(VersionControl::NoVcs),
+        Some((s, p)) => {
+            return Err(internal(format!("invalid configuration for key \
+                                         `cargo-new.vcs`, unknown vcs `{}` \
+                                         (found in {})", s, p)))
+        }
+        None => None
+    };
+    Ok(CargoNewConfig {
+        name: name,
+        email: email,
+        version_control: vcs,
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use super::strip_rust_affixes;
+
+    #[test]
+    fn affixes_stripped() {
+        assert_eq!(strip_rust_affixes("rust-foo"), "foo");
+        assert_eq!(strip_rust_affixes("foo-rs"), "foo");
+        assert_eq!(strip_rust_affixes("rs_foo"), "foo");
+        // Only one affix is stripped
+        assert_eq!(strip_rust_affixes("rs-foo-rs"), "foo-rs");
+        assert_eq!(strip_rust_affixes("foo-rs-rs"), "foo-rs");
+        // It shouldn't touch the middle
+        assert_eq!(strip_rust_affixes("some-rust-crate"), "some-rust-crate");
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_output_metadata.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_output_metadata.rs
new file mode 100644
index 000000000..1dca07451
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_output_metadata.rs
@@ -0,0 +1,106 @@
+use serde::ser::{self, Serialize};
+
+use core::resolver::Resolve;
+use core::{Package, PackageId, Workspace};
+use ops::{self, Packages};
+use util::CargoResult;
+
+const VERSION: u32 = 1;
+
+pub struct OutputMetadataOptions {
+    pub features: Vec<String>,
+    pub no_default_features: bool,
+    pub all_features: bool,
+    pub no_deps: bool,
+    pub version: u32,
+}
+
+/// Loads the manifest, resolves the dependencies of the project to the concrete
+/// used versions - considering overrides - and writes all dependencies in a JSON
+/// format to stdout.
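+///
+/// Rough sketch of the emitted shape (contents elided; illustrative only):
+/// `{"packages": [...], "workspace_members": [...],
+///   "resolve": {"nodes": [...], "root": ...},
+///   "target_directory": "...", "version": 1}`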
+pub fn output_metadata(ws: &Workspace,
+                       opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    if opt.version != VERSION {
+        bail!("metadata version {} not supported, only {} is currently supported",
+              opt.version, VERSION);
+    }
+    if opt.no_deps {
+        metadata_no_deps(ws, opt)
+    } else {
+        metadata_full(ws, opt)
+    }
+}
+
+fn metadata_no_deps(ws: &Workspace,
+                    _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    Ok(ExportInfo {
+        packages: ws.members().cloned().collect(),
+        workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
+        resolve: None,
+        target_directory: ws.target_dir().display().to_string(),
+        version: VERSION,
+    })
+}
+
+fn metadata_full(ws: &Workspace,
+                 opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    let specs = Packages::All.into_package_id_specs(ws)?;
+    let deps = ops::resolve_ws_precisely(ws,
+                                         None,
+                                         &opt.features,
+                                         opt.all_features,
+                                         opt.no_default_features,
+                                         &specs)?;
+    let (packages, resolve) = deps;
+
+    let packages = packages.package_ids()
+                           .map(|i| packages.get(i).map(|p| p.clone()))
+                           .collect::<CargoResult<Vec<_>>>()?;
+
+    Ok(ExportInfo {
+        packages: packages,
+        workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
+        resolve: Some(MetadataResolve{
+            resolve: resolve,
+            root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
+        }),
+        target_directory: ws.target_dir().display().to_string(),
+        version: VERSION,
+    })
+}
+
+#[derive(Serialize)]
+pub struct ExportInfo {
+    packages: Vec<Package>,
+    workspace_members: Vec<PackageId>,
+    resolve: Option<MetadataResolve>,
+    target_directory: String,
+    version: u32,
+}
+
+/// Newtype wrapper to provide a custom `Serialize` implementation.
+/// The one from lockfile does not fit because it uses a non-standard
+/// format for `PackageId`s
+#[derive(Serialize)]
+struct MetadataResolve {
+    #[serde(rename = "nodes", serialize_with = "serialize_resolve")]
+    resolve: Resolve,
+    root: Option<PackageId>,
+}
+
+fn serialize_resolve<S>(resolve: &Resolve, s: S) -> Result<S::Ok, S::Error>
+    where S: ser::Serializer,
+{
+    #[derive(Serialize)]
+    struct Node<'a> {
+        id: &'a PackageId,
+        dependencies: Vec<&'a PackageId>,
+    }
+
+    resolve.iter().map(|id| {
+        Node {
+            id: id,
+            dependencies: resolve.deps(id).collect(),
+        }
+    }).collect::<Vec<_>>().serialize(s)
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_package.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_package.rs
new file mode 100644
index 000000000..f12ce12f3
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_package.rs
@@ -0,0 +1,340 @@
+use std::fs::{self, File};
+use std::io::SeekFrom;
+use std::io::prelude::*;
+use std::path::{self, Path};
+use std::sync::Arc;
+
+use flate2::read::GzDecoder;
+use flate2::{GzBuilder, Compression};
+use git2;
+use tar::{Archive, Builder, Header, EntryType};
+
+use core::{Package, Workspace, Source, SourceId};
+use sources::PathSource;
+use util::{self, internal, Config, FileLock};
+use util::errors::{CargoResult, CargoResultExt};
+use ops::{self, DefaultExecutor};
+
+pub struct PackageOpts<'cfg> {
+    pub config: &'cfg Config,
+    pub list: bool,
+    pub check_metadata: bool,
+    pub allow_dirty: bool,
+    pub verify: bool,
+    pub jobs: Option<u32>,
+    pub target: Option<&'cfg str>,
+}
+
+pub fn package(ws: &Workspace,
+               opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
+    let pkg = ws.current()?;
+    let config = ws.config();
+    if !pkg.manifest().features().activated().is_empty() {
+        bail!("cannot package or publish crates which activate nightly-only \
+               cargo features")
+    }
+    let mut src = PathSource::new(pkg.root(),
+                                  pkg.package_id().source_id(),
+                                  config);
+    
src.update()?; + + if opts.check_metadata { + check_metadata(pkg, config)?; + } + + verify_dependencies(pkg)?; + + if opts.list { + let root = pkg.root(); + let mut list: Vec<_> = src.list_files(pkg)?.iter().map(|file| { + util::without_prefix(file, root).unwrap().to_path_buf() + }).collect(); + list.sort(); + for file in list.iter() { + println!("{}", file.display()); + } + return Ok(None) + } + + if !opts.allow_dirty { + check_not_dirty(pkg, &src)?; + } + + let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); + let dir = ws.target_dir().join("package"); + let mut dst = { + let tmp = format!(".{}", filename); + dir.open_rw(&tmp, config, "package scratch space")? + }; + + // Package up and test a temporary tarball and only move it to the final + // location if it actually passes all our tests. Any previously existing + // tarball can be assumed as corrupt or invalid, so we just blow it away if + // it exists. + config.shell().status("Packaging", pkg.package_id().to_string())?; + dst.file().set_len(0)?; + tar(ws, &src, dst.file(), &filename).chain_err(|| { + "failed to prepare local package for uploading" + })?; + if opts.verify { + dst.seek(SeekFrom::Start(0))?; + run_verify(ws, dst.file(), opts).chain_err(|| { + "failed to verify package tarball" + })? + } + dst.seek(SeekFrom::Start(0))?; + { + let src_path = dst.path(); + let dst_path = dst.parent().join(&filename); + fs::rename(&src_path, &dst_path).chain_err(|| { + "failed to move temporary tarball into final location" + })?; + } + Ok(Some(dst)) +} + +// check that the package has some piece of metadata that a human can +// use to tell what the package is about. +fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> { + let md = pkg.manifest().metadata(); + + let mut missing = vec![]; + + macro_rules! lacking { + ($( $($field: ident)||* ),*) => {{ + $( + if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* { + $(missing.push(stringify!($field).replace("_", "-"));)* + } + )* + }} + } + lacking!(description, license || license_file, documentation || homepage || repository); + + if !missing.is_empty() { + let mut things = missing[..missing.len() - 1].join(", "); + // things will be empty if and only if length == 1 (i.e. the only case + // to have no `or`). + if !things.is_empty() { + things.push_str(" or "); + } + things.push_str(missing.last().unwrap()); + + config.shell().warn( + &format!("manifest has no {things}.\n\ + See http://doc.crates.io/manifest.html#package-metadata for more info.", + things = things))? + } + Ok(()) +} + +// check that the package dependencies are safe to deploy. 
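+// A crate published to a registry cannot rely on `path` dependencies, since
+// the local path means nothing on another machine, so each one must also
+// carry a version requirement.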
+fn verify_dependencies(pkg: &Package) -> CargoResult<()> { + for dep in pkg.dependencies() { + if dep.source_id().is_path() && !dep.specified_req() { + bail!("all path dependencies must have a version specified \ + when packaging.\ndependency `{}` does not specify \ + a version.", dep.name()) + } + } + Ok(()) +} + +fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> { + if let Ok(repo) = git2::Repository::discover(p.root()) { + if let Some(workdir) = repo.workdir() { + debug!("found a git repo at {:?}, checking if index present", + workdir); + let path = p.manifest_path(); + let path = path.strip_prefix(workdir).unwrap_or(path); + if let Ok(status) = repo.status_file(path) { + if (status & git2::STATUS_IGNORED).is_empty() { + debug!("Cargo.toml found in repo, checking if dirty"); + return git(p, src, &repo) + } + } + } + } + + // No VCS recognized, we don't know if the directory is dirty or not, so we + // have to assume that it's clean. + return Ok(()); + + fn git(p: &Package, + src: &PathSource, + repo: &git2::Repository) -> CargoResult<()> { + let workdir = repo.workdir().unwrap(); + let dirty = src.list_files(p)?.iter().filter(|file| { + let relative = file.strip_prefix(workdir).unwrap(); + if let Ok(status) = repo.status_file(relative) { + status != git2::STATUS_CURRENT + } else { + false + } + }).map(|path| { + path.strip_prefix(p.root()).unwrap_or(path).display().to_string() + }).collect::>(); + if dirty.is_empty() { + Ok(()) + } else { + bail!("{} files in the working directory contain changes that were \ + not yet committed into git:\n\n{}\n\n\ + to proceed despite this, pass the `--allow-dirty` flag", + dirty.len(), dirty.join("\n")) + } + } +} + +fn tar(ws: &Workspace, + src: &PathSource, + dst: &File, + filename: &str) -> CargoResult<()> { + // Prepare the encoder and its header + let filename = Path::new(filename); + let encoder = GzBuilder::new().filename(util::path2bytes(filename)?) + .write(dst, Compression::Best); + + // Put all package files into a compressed archive + let mut ar = Builder::new(encoder); + let pkg = ws.current()?; + let config = ws.config(); + let root = pkg.root(); + for file in src.list_files(pkg)?.iter() { + let relative = util::without_prefix(file, root).unwrap(); + check_filename(relative)?; + let relative = relative.to_str().ok_or_else(|| { + format!("non-utf8 path in source directory: {}", + relative.display()) + })?; + config.shell().verbose(|shell| { + shell.status("Archiving", &relative) + })?; + let path = format!("{}-{}{}{}", pkg.name(), pkg.version(), + path::MAIN_SEPARATOR, relative); + + // The tar::Builder type by default will build GNU archives, but + // unfortunately we force it here to use UStar archives instead. The + // UStar format has more limitations on the length of path name that it + // can encode, so it's not quite as nice to use. + // + // Older cargos, however, had a bug where GNU archives were interpreted + // as UStar archives. This bug means that if we publish a GNU archive + // which has fully filled out metadata it'll be corrupt when unpacked by + // older cargos. + // + // Hopefully in the future after enough cargos have been running around + // with the bugfixed tar-rs library we'll be able to switch this over to + // GNU archives, but for now we'll just say that you can't encode paths + // in archives that are *too* long. + // + // For an instance of this in the wild, use the tar-rs 0.3.3 library to + // unpack the selectors 0.4.0 crate on crates.io. 
Either that or take a + // look at rust-lang/cargo#2326 + let mut header = Header::new_ustar(); + header.set_path(&path).chain_err(|| { + format!("failed to add to archive: `{}`", relative) + })?; + let mut file = File::open(file).chain_err(|| { + format!("failed to open for archiving: `{}`", file.display()) + })?; + let metadata = file.metadata().chain_err(|| { + format!("could not learn metadata for: `{}`", relative) + })?; + header.set_metadata(&metadata); + + if relative == "Cargo.toml" { + let orig = Path::new(&path).with_file_name("Cargo.toml.orig"); + header.set_path(&orig)?; + header.set_cksum(); + ar.append(&header, &mut file).chain_err(|| { + internal(format!("could not archive source file `{}`", relative)) + })?; + + let mut header = Header::new_ustar(); + let toml = pkg.to_registry_toml(); + header.set_path(&path)?; + header.set_entry_type(EntryType::file()); + header.set_mode(0o644); + header.set_size(toml.len() as u64); + header.set_cksum(); + ar.append(&header, toml.as_bytes()).chain_err(|| { + internal(format!("could not archive source file `{}`", relative)) + })?; + } else { + header.set_cksum(); + ar.append(&header, &mut file).chain_err(|| { + internal(format!("could not archive source file `{}`", relative)) + })?; + } + } + let encoder = ar.into_inner()?; + encoder.finish()?; + Ok(()) +} + +fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()> { + let config = ws.config(); + let pkg = ws.current()?; + + config.shell().status("Verifying", pkg)?; + + let f = GzDecoder::new(tar)?; + let dst = pkg.root().join(&format!("target/package/{}-{}", + pkg.name(), pkg.version())); + if fs::metadata(&dst).is_ok() { + fs::remove_dir_all(&dst)?; + } + let mut archive = Archive::new(f); + archive.unpack(dst.parent().unwrap())?; + + // Manufacture an ephemeral workspace to ensure that even if the top-level + // package has a workspace we can still build our new crate. + let id = SourceId::for_path(&dst)?; + let mut src = PathSource::new(&dst, &id, ws.config()); + let new_pkg = src.root_package()?; + let ws = Workspace::ephemeral(new_pkg, config, None, true)?; + + ops::compile_ws(&ws, None, &ops::CompileOptions { + config: config, + jobs: opts.jobs, + target: opts.target, + features: &[], + no_default_features: false, + all_features: false, + spec: ops::Packages::Packages(&[]), + filter: ops::CompileFilter::Default { required_features_filterable: true }, + release: false, + message_format: ops::MessageFormat::Human, + mode: ops::CompileMode::Build, + target_rustdoc_args: None, + target_rustc_args: None, + }, Arc::new(DefaultExecutor))?; + + Ok(()) +} + +// It can often be the case that files of a particular name on one platform +// can't actually be created on another platform. For example files with colons +// in the name are allowed on Unix but not on Windows. +// +// To help out in situations like this, issue about weird filenames when +// packaging as a "heads up" that something may not work on other platforms. 
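+// (For example, `src/foo:bar.rs` is a legal path on Unix but cannot be
+// created on Windows; path illustrative.)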
+fn check_filename(file: &Path) -> CargoResult<()> {
+    let name = match file.file_name() {
+        Some(name) => name,
+        None => return Ok(()),
+    };
+    let name = match name.to_str() {
+        Some(name) => name,
+        None => {
+            bail!("path does not have a unicode filename which may not unpack \
+                   on all platforms: {}", file.display())
+        }
+    };
+    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
+    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
+        bail!("cannot package a filename with a special character `{}`: {}",
+              c, file.display())
+    }
+    Ok(())
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_pkgid.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_pkgid.rs
new file mode 100644
index 000000000..0461bc4c8
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_pkgid.rs
@@ -0,0 +1,16 @@
+use ops;
+use core::{PackageIdSpec, Workspace};
+use util::CargoResult;
+
+pub fn pkgid(ws: &Workspace, spec: Option<&str>) -> CargoResult<PackageIdSpec> {
+    let resolve = match ops::load_pkg_lockfile(ws)? {
+        Some(resolve) => resolve,
+        None => bail!("a Cargo.lock must exist for this command"),
+    };
+
+    let pkgid = match spec {
+        Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?,
+        None => ws.current()?.package_id(),
+    };
+    Ok(PackageIdSpec::from_package_id(pkgid))
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_read_manifest.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_read_manifest.rs
new file mode 100644
index 000000000..19d9f6aef
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_read_manifest.rs
@@ -0,0 +1,165 @@
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use core::{Package, SourceId, PackageId, EitherManifest};
+use util::{self, Config};
+use util::errors::{CargoResult, CargoResultExt, CargoError};
+use util::important_paths::find_project_manifest_exact;
+use util::toml::read_manifest;
+
+pub fn read_package(path: &Path, source_id: &SourceId, config: &Config)
+                    -> CargoResult<(Package, Vec<PathBuf>)> {
+    trace!("read_package; path={}; source-id={}", path.display(), source_id);
+    let (manifest, nested) = read_manifest(path, source_id, config)?;
+    let manifest = match manifest {
+        EitherManifest::Real(manifest) => manifest,
+        EitherManifest::Virtual(..)
=> { + bail!("found a virtual manifest at `{}` instead of a package \ + manifest", path.display()) + } + }; + + Ok((Package::new(manifest, path), nested)) +} + +pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config) + -> CargoResult> { + let mut all_packages = HashMap::new(); + let mut visited = HashSet::::new(); + let mut errors = Vec::::new(); + + trace!("looking for root package: {}, source_id={}", path.display(), source_id); + + walk(path, &mut |dir| { + trace!("looking for child package: {}", dir.display()); + + // Don't recurse into hidden/dot directories unless we're at the toplevel + if dir != path { + let name = dir.file_name().and_then(|s| s.to_str()); + if name.map(|s| s.starts_with('.')) == Some(true) { + return Ok(false) + } + + // Don't automatically discover packages across git submodules + if fs::metadata(&dir.join(".git")).is_ok() { + return Ok(false) + } + } + + // Don't ever look at target directories + if dir.file_name().and_then(|s| s.to_str()) == Some("target") && + has_manifest(dir.parent().unwrap()) { + return Ok(false) + } + + if has_manifest(dir) { + read_nested_packages(dir, &mut all_packages, source_id, config, + &mut visited, &mut errors)?; + } + Ok(true) + })?; + + if all_packages.is_empty() { + match errors.pop() { + Some(err) => Err(err), + None => Err(format!("Could not find Cargo.toml in `{}`", path.display()).into()), + } + } else { + Ok(all_packages.into_iter().map(|(_, v)| v).collect()) + } +} + +fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult) + -> CargoResult<()> { + if !callback(path)? { + trace!("not processing {}", path.display()); + return Ok(()) + } + + // Ignore any permission denied errors because temporary directories + // can often have some weird permissions on them. + let dirs = match fs::read_dir(path) { + Ok(dirs) => dirs, + Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => { + return Ok(()) + } + Err(e) => { + return Err(e).chain_err(|| { + format!("failed to read directory `{}`", path.display()) + }) + } + }; + for dir in dirs { + let dir = dir?; + if dir.file_type()?.is_dir() { + walk(&dir.path(), callback)?; + } + } + Ok(()) +} + +fn has_manifest(path: &Path) -> bool { + find_project_manifest_exact(path, "Cargo.toml").is_ok() +} + +fn read_nested_packages(path: &Path, + all_packages: &mut HashMap, + source_id: &SourceId, + config: &Config, + visited: &mut HashSet, + errors: &mut Vec) -> CargoResult<()> { + if !visited.insert(path.to_path_buf()) { return Ok(()) } + + let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?; + + let (manifest, nested) = match read_manifest(&manifest_path, source_id, config) { + Err(err) => { + // Ignore malformed manifests found on git repositories + // + // git source try to find and read all manifests from the repository + // but since it's not possible to exclude folders from this search + // it's safer to ignore malformed manifests to avoid + // + // TODO: Add a way to exclude folders? + info!("skipping malformed package found at `{}`", + path.to_string_lossy()); + errors.push(err); + return Ok(()); + } + Ok(tuple) => tuple + }; + + let manifest = match manifest { + EitherManifest::Real(manifest) => manifest, + EitherManifest::Virtual(..) 
=> return Ok(()), + }; + let pkg = Package::new(manifest, &manifest_path); + + let pkg_id = pkg.package_id().clone(); + if !all_packages.contains_key(&pkg_id) { + all_packages.insert(pkg_id, pkg); + } else { + info!("skipping nested package `{}` found at `{}`", + pkg.name(), path.to_string_lossy()); + } + + // Registry sources are not allowed to have `path=` dependencies because + // they're all translated to actual registry dependencies. + // + // We normalize the path here ensure that we don't infinitely walk around + // looking for crates. By normalizing we ensure that we visit this crate at + // most once. + // + // TODO: filesystem/symlink implications? + if !source_id.is_registry() { + for p in nested.iter() { + let path = util::normalize_path(&path.join(p)); + read_nested_packages(&path, all_packages, source_id, + config, visited, errors)?; + } + } + + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_run.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_run.rs new file mode 100644 index 000000000..3a4e7f6f2 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_run.rs @@ -0,0 +1,74 @@ +use std::path::Path; + +use ops::{self, Packages}; +use util::{self, CargoResult, CargoError, ProcessError}; +use util::errors::CargoErrorKind; +use core::Workspace; + +pub fn run(ws: &Workspace, + options: &ops::CompileOptions, + args: &[String]) -> CargoResult> { + let config = ws.config(); + + let pkg = match options.spec { + Packages::All => unreachable!("cargo run supports single package only"), + Packages::OptOut(_) => unreachable!("cargo run supports single package only"), + Packages::Packages(xs) => match xs.len() { + 0 => ws.current()?, + 1 => ws.members() + .find(|pkg| pkg.name() == xs[0]) + .ok_or_else(|| + CargoError::from( + format!("package `{}` is not a member of the workspace", xs[0])) + )?, + _ => unreachable!("cargo run supports single package only"), + } + }; + + let mut bins = pkg.manifest().targets().iter().filter(|a| { + !a.is_lib() && !a.is_custom_build() && if !options.filter.is_specific() { + a.is_bin() + } else { + options.filter.matches(a) + } + }); + if bins.next().is_none() { + if !options.filter.is_specific() { + bail!("a bin target must be available for `cargo run`") + } else { + // this will be verified in cargo_compile + } + } + if bins.next().is_some() { + if !options.filter.is_specific() { + bail!("`cargo run` requires that a project only have one \ + executable; use the `--bin` option to specify which one \ + to run") + } else { + bail!("`cargo run` can run at most one executable, but \ + multiple were specified") + } + } + + let compile = ops::compile(ws, options)?; + assert_eq!(compile.binaries.len(), 1); + let exe = &compile.binaries[0]; + let exe = match util::without_prefix(exe, config.cwd()) { + Some(path) if path.file_name() == Some(path.as_os_str()) + => Path::new(".").join(path).to_path_buf(), + Some(path) => path.to_path_buf(), + None => exe.to_path_buf(), + }; + let mut process = compile.target_process(exe, pkg)?; + process.args(args).cwd(config.cwd()); + + config.shell().status("Running", process.to_string())?; + + let result = process.exec_replace(); + + match result { + Ok(()) => Ok(None), + Err(CargoError(CargoErrorKind::ProcessErrorKind(e), ..)) => Ok(Some(e)), + Err(e) => Err(e) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/compilation.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/compilation.rs new file mode 100644 index 
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/compilation.rs
@@ -0,0 +1,187 @@
+use std::collections::{HashMap, HashSet};
+use std::ffi::OsStr;
+use std::path::PathBuf;
+use semver::Version;
+
+use core::{PackageId, Package, Target, TargetKind};
+use util::{self, CargoResult, Config, LazyCell, ProcessBuilder, process, join_paths};
+
+/// A structure returning the result of a compilation.
+pub struct Compilation<'cfg> {
+    /// A mapping from a package to the list of libraries that need to be
+    /// linked when working with that package.
+    pub libraries: HashMap<PackageId, HashSet<(Target, PathBuf)>>,
+
+    /// An array of all tests created during this compilation.
+    pub tests: Vec<(Package, TargetKind, String, PathBuf)>,
+
+    /// An array of all binaries created.
+    pub binaries: Vec<PathBuf>,
+
+    /// All directories for the output of native build commands.
+    ///
+    /// This is currently used to drive some entries which are added to the
+    /// LD_LIBRARY_PATH as appropriate.
+    // TODO: deprecated, remove
+    pub native_dirs: HashSet<PathBuf>,
+
+    /// Root output directory (for the local package's artifacts)
+    pub root_output: PathBuf,
+
+    /// Output directory for rust dependencies.
+    /// May be for the host or for a specific target.
+    pub deps_output: PathBuf,
+
+    /// Output directory for the rust host dependencies.
+    pub host_deps_output: PathBuf,
+
+    /// The path to rustc's own libstd
+    pub host_dylib_path: Option<PathBuf>,
+
+    /// The path to libstd for the target
+    pub target_dylib_path: Option<PathBuf>,
+
+    /// Extra environment variables that were passed to compilations and should
+    /// be passed to future invocations of programs.
+    pub extra_env: HashMap<PackageId, Vec<(String, String)>>,
+
+    pub to_doc_test: Vec<Package>,
+
+    /// Features per package enabled during this compilation.
+    pub cfgs: HashMap<PackageId, HashSet<String>>,
+
+    pub target: String,
+
+    config: &'cfg Config,
+
+    target_runner: LazyCell<Option<(PathBuf, Vec<String>)>>,
+}
+
+impl<'cfg> Compilation<'cfg> {
+    pub fn new(config: &'cfg Config) -> Compilation<'cfg> {
+        Compilation {
+            libraries: HashMap::new(),
+            native_dirs: HashSet::new(),  // TODO: deprecated, remove
+            root_output: PathBuf::from("/"),
+            deps_output: PathBuf::from("/"),
+            host_deps_output: PathBuf::from("/"),
+            host_dylib_path: None,
+            target_dylib_path: None,
+            tests: Vec::new(),
+            binaries: Vec::new(),
+            extra_env: HashMap::new(),
+            to_doc_test: Vec::new(),
+            cfgs: HashMap::new(),
+            config: config,
+            target: String::new(),
+            target_runner: LazyCell::new(),
+        }
+    }
+
+    /// See `process`.
+    pub fn rustc_process(&self, pkg: &Package) -> CargoResult<ProcessBuilder> {
+        self.fill_env(self.config.rustc()?.process(), pkg, true)
+    }
+
+    /// See `process`.
+    pub fn rustdoc_process(&self, pkg: &Package) -> CargoResult<ProcessBuilder> {
+        self.fill_env(process(&*self.config.rustdoc()?), pkg, false)
+    }
+
+    /// See `process`.
+    pub fn host_process<T: AsRef<OsStr>>(&self, cmd: T, pkg: &Package)
+                                         -> CargoResult<ProcessBuilder> {
+        self.fill_env(process(cmd), pkg, true)
+    }
+
+    fn target_runner(&self) -> CargoResult<&Option<(PathBuf, Vec<String>)>> {
+        self.target_runner.get_or_try_init(|| {
+            let key = format!("target.{}.runner", self.target);
+            Ok(self.config.get_path_and_args(&key)?.map(|v| v.val))
+        })
+    }
+
+    /// See `process`.
+    pub fn target_process<T: AsRef<OsStr>>(&self, cmd: T, pkg: &Package)
+                                           -> CargoResult<ProcessBuilder> {
+        let builder = if let Some((ref runner, ref args)) = *self.target_runner()? {
+            let mut builder = process(runner);
+            builder.args(args);
+            builder.arg(cmd);
+            builder
+        } else {
+            process(cmd)
+        };
+        self.fill_env(builder, pkg, false)
+    }
+
+    /// Prepares a new process with an appropriate environment to run against
+    /// the artifacts produced by the build process.
+    ///
+    /// The package argument is also used to configure environment variables as
+    /// well as the working directory of the child process.
+    fn fill_env(&self, mut cmd: ProcessBuilder, pkg: &Package, is_host: bool)
+                -> CargoResult<ProcessBuilder> {
+
+        let mut search_path = if is_host {
+            let mut search_path = vec![self.host_deps_output.clone()];
+            search_path.extend(self.host_dylib_path.clone());
+            search_path
+        } else {
+            let mut search_path =
+                super::filter_dynamic_search_path(self.native_dirs.iter(),
+                                                  &self.root_output);
+            search_path.push(self.root_output.clone());
+            search_path.push(self.deps_output.clone());
+            search_path.extend(self.target_dylib_path.clone());
+            search_path
+        };
+
+        search_path.extend(util::dylib_path().into_iter());
+        let search_path = join_paths(&search_path, util::dylib_path_envvar())?;
+
+        cmd.env(util::dylib_path_envvar(), &search_path);
+        if let Some(env) = self.extra_env.get(pkg.package_id()) {
+            for &(ref k, ref v) in env {
+                cmd.env(k, v);
+            }
+        }
+
+        let metadata = pkg.manifest().metadata();
+
+        let cargo_exe = self.config.cargo_exe()?;
+        cmd.env(::CARGO_ENV, cargo_exe);
+
+        // When adding new environment variables depending on
+        // crate properties which might require rebuild upon change
+        // consider adding the corresponding properties to the hash
+        // in Context::target_metadata()
+        cmd.env("CARGO_MANIFEST_DIR", pkg.root())
+           .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
+           .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
+           .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
+           .env("CARGO_PKG_VERSION_PRE", &pre_version_component(pkg.version()))
+           .env("CARGO_PKG_VERSION", &pkg.version().to_string())
+           .env("CARGO_PKG_NAME", &pkg.name())
+           .env("CARGO_PKG_DESCRIPTION", metadata.description.as_ref().unwrap_or(&String::new()))
+           .env("CARGO_PKG_HOMEPAGE", metadata.homepage.as_ref().unwrap_or(&String::new()))
+           .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
+           .cwd(pkg.root());
+        Ok(cmd)
+    }
+}
+
+fn pre_version_component(v: &Version) -> String {
+    if v.pre.is_empty() {
+        return String::new();
+    }
+
+    let mut ret = String::new();
+
+    for (i, x) in v.pre.iter().enumerate() {
+        if i != 0 { ret.push('.') };
+        ret.push_str(&x.to_string());
+    }
+
+    ret
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/context.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/context.rs
new file mode 100644
index 000000000..369c9d7c5
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/context.rs
@@ -0,0 +1,1313 @@
+#![allow(deprecated)]
+
+use std::collections::{HashSet, HashMap, BTreeSet};
+use std::collections::hash_map::Entry;
+use std::env;
+use std::fmt;
+use std::hash::{Hasher, Hash, SipHasher};
+use std::path::{Path, PathBuf};
+use std::str::{self, FromStr};
+use std::sync::Arc;
+use std::cell::RefCell;
+
+use jobserver::Client;
+
+use core::{Package, PackageId, PackageSet, Resolve, Target, Profile};
+use core::{TargetKind, Profiles, Dependency, Workspace};
+use core::dependency::Kind as DepKind;
+use util::{self, ProcessBuilder, internal, Config, profile, Cfg, CfgExpr};
+use util::errors::{CargoResult, CargoResultExt};
+
+use super::TargetConfig;
+use super::custom_build::{BuildState, BuildScripts, BuildDeps};
+use super::fingerprint::Fingerprint;
+use super::layout::Layout;
+use super::links::Links;
+use super::{Kind, Compilation, BuildConfig};
+
+/// All information needed to define a Unit.
+///
+/// A unit is an object that has enough information so that cargo knows how to build it.
+/// For example, if your project has dependencies, then every dependency will be built as a library
+/// unit. If your project is a library, then it will be built as a library unit as well, or if it
+/// is a binary with `main.rs`, then a binary will be output. There are also separate unit types
+/// for `test`ing and `check`ing, amongst others.
+///
+/// The unit also holds information about all possible metadata about the package in `pkg`.
+///
+/// A unit needs to know extra information in addition to the type and root source file. For
+/// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know
+/// whether you want a debug or release build. There is enough information in this struct to figure
+/// all that out.
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
+pub struct Unit<'a> {
+    /// Information about available targets, which files to include/exclude, etc. Basically stuff in
+    /// `Cargo.toml`.
+    pub pkg: &'a Package,
+    /// Information about the specific target to build, out of the possible targets in `pkg`. Not
+    /// to be confused with *target-triple* (or *target architecture* ...), the target arch for a
+    /// build.
+    pub target: &'a Target,
+    /// The profile contains information about *how* the build should be run, including debug
+    /// level, extra args to pass to rustc, etc.
+    pub profile: &'a Profile,
+    /// Whether this compilation unit is for the host or target architecture.
+    ///
+    /// For example, when
+    /// cross compiling and using a custom build script, the build script needs to be compiled for
+    /// the host architecture so the host rustc can use it (when compiling to the target
+    /// architecture).
+    pub kind: Kind,
+}
+
+/// Type of each file generated by a Unit.
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub enum TargetFileType {
+    /// Not a special file type.
+    Normal,
+    /// It is something you can link against (e.g. a library)
+    Linkable,
+    /// It is a piece of external debug information (e.g. *.dSYM and *.pdb)
+    DebugInfo,
+}
+
+/// The build context, containing all information about a build task
+pub struct Context<'a, 'cfg: 'a> {
+    /// The workspace the build is for
+    pub ws: &'a Workspace<'cfg>,
+    /// The cargo configuration
+    pub config: &'cfg Config,
+    /// The dependency graph for our build
+    pub resolve: &'a Resolve,
+    /// Information on the compilation output
+    pub compilation: Compilation<'cfg>,
+    pub packages: &'a PackageSet<'cfg>,
+    pub build_state: Arc<BuildState>,
+    pub build_script_overridden: HashSet<(PackageId, Kind)>,
+    pub build_explicit_deps: HashMap<Unit<'a>, BuildDeps>,
+    pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>,
+    pub compiled: HashSet<Unit<'a>>,
+    pub build_config: BuildConfig,
+    pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
+    pub links: Links<'a>,
+    pub used_in_plugin: HashSet<Unit<'a>>,
+    pub jobserver: Client,
+
+    /// The target directory layout for the host (and target if it is the same as host)
+    host: Layout,
+    /// The target directory layout for the target (if different from the host)
+    target: Option<Layout>,
+    target_info: TargetInfo,
+    host_info: TargetInfo,
+    profiles: &'a Profiles,
+    incremental_enabled: bool,
+
+    /// For each Unit, a list of all files produced as a triple of
+    ///
+    /// - File name that will be produced by the build process (in `deps`)
+    /// - If it should be linked into `target`, and what it should be called (e.g. without
+    ///   metadata).
+    /// - Type of the file (library / debug symbol / else)
+    target_filenames: HashMap<Unit<'a>, Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>>,
+    target_metadatas: HashMap<Unit<'a>, Option<Metadata>>,
+}
+
+#[derive(Clone, Default)]
+struct TargetInfo {
+    crate_type_process: Option<ProcessBuilder>,
+    crate_types: RefCell<HashMap<String, Option<(String, String)>>>,
+    cfg: Option<Vec<Cfg>>,
+}
+
+impl TargetInfo {
+    fn discover_crate_type(&self, crate_type: &str) -> CargoResult<Option<(String, String)>> {
+        let mut process = self.crate_type_process.clone().unwrap();
+
+        process.arg("--crate-type").arg(crate_type);
+
+        let output = process.exec_with_output().chain_err(|| {
+            format!("failed to run `rustc` to learn about \
+                     crate-type {} information", crate_type)
+        })?;
+
+        let error = str::from_utf8(&output.stderr).unwrap();
+        let output = str::from_utf8(&output.stdout).unwrap();
+        Ok(parse_crate_type(crate_type, error, &mut output.lines())?)
+    }
+}
+
+#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd)]
+pub struct Metadata(u64);
+
+impl<'a, 'cfg> Context<'a, 'cfg> {
+    pub fn new(ws: &'a Workspace<'cfg>,
+               resolve: &'a Resolve,
+               packages: &'a PackageSet<'cfg>,
+               config: &'cfg Config,
+               build_config: BuildConfig,
+               profiles: &'a Profiles) -> CargoResult<Context<'a, 'cfg>> {
+
+        let dest = if build_config.release { "release" } else { "debug" };
+        let host_layout = Layout::new(ws, None, dest)?;
+        let target_layout = match build_config.requested_target.as_ref() {
+            Some(target) => Some(Layout::new(ws, Some(target), dest)?),
+            None => None,
+        };
+
+        // Enable incremental builds if the user opts in. For now,
+        // this is an environment variable until things stabilize a
+        // bit more.
+        let incremental_enabled = match env::var("CARGO_INCREMENTAL") {
+            Ok(v) => v == "1",
+            Err(_) => false,
+        };
+
+        // -Z can only be used on nightly builds; other builds complain loudly.
+        // Since incremental builds only work on nightly anyway, we silently
+        // ignore CARGO_INCREMENTAL on anything but nightly. This allows users
+        // to always have CARGO_INCREMENTAL set without getting unexpected
+        // errors on stable/beta builds.
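+        //
+        // (Illustrative note, not from the original source: only the exact
+        // value "1" opts in, so e.g. `CARGO_INCREMENTAL=1 cargo build` enables
+        // incremental compilation here, while `CARGO_INCREMENTAL=true` or an
+        // unset variable leaves it disabled.)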
+        let is_nightly =
+            config.rustc()?.verbose_version.contains("-nightly") ||
+            config.rustc()?.verbose_version.contains("-dev");
+        let incremental_enabled = incremental_enabled && is_nightly;
+
+        // Load up the jobserver that we'll use to manage our parallelism. This
+        // is the same as the GNU make implementation of a jobserver, and
+        // intentionally so! It's hoped that we can interact with GNU make and
+        // all share the same jobserver.
+        //
+        // Note that if we don't have a jobserver in our environment then we
+        // create our own, and we create it with `n-1` tokens because one token
+        // is ourself, a running process.
+        let jobserver = match config.jobserver_from_env() {
+            Some(c) => c.clone(),
+            None => Client::new(build_config.jobs as usize - 1).chain_err(|| {
+                "failed to create jobserver"
+            })?,
+        };
+
+        Ok(Context {
+            ws: ws,
+            host: host_layout,
+            target: target_layout,
+            resolve: resolve,
+            packages: packages,
+            config: config,
+            target_info: TargetInfo::default(),
+            host_info: TargetInfo::default(),
+            compilation: Compilation::new(config),
+            build_state: Arc::new(BuildState::new(&build_config)),
+            build_config: build_config,
+            fingerprints: HashMap::new(),
+            profiles: profiles,
+            compiled: HashSet::new(),
+            build_scripts: HashMap::new(),
+            build_explicit_deps: HashMap::new(),
+            links: Links::new(),
+            used_in_plugin: HashSet::new(),
+            incremental_enabled: incremental_enabled,
+            jobserver: jobserver,
+            build_script_overridden: HashSet::new(),
+
+            // TODO: Pre-calculate these with a topo-sort, rather than lazy-calculating
+            target_filenames: HashMap::new(),
+            target_metadatas: HashMap::new(),
+        })
+    }
+
+    /// Prepare this context, ensuring that all filesystem directories are in
+    /// place.
+    pub fn prepare(&mut self) -> CargoResult<()> {
+        let _p = profile::start("preparing layout");
+
+        self.host.prepare().chain_err(|| {
+            internal("couldn't prepare build directories")
+        })?;
+        if let Some(ref mut target) = self.target {
+            target.prepare().chain_err(|| {
+                internal("couldn't prepare build directories")
+            })?;
+        }
+
+        self.compilation.host_deps_output = self.host.deps().to_path_buf();
+
+        let layout = self.target.as_ref().unwrap_or(&self.host);
+        self.compilation.root_output = layout.dest().to_path_buf();
+        self.compilation.deps_output = layout.deps().to_path_buf();
+        Ok(())
+    }
+
+    /// Ensure that we've collected all target-specific information to compile
+    /// all the units mentioned in `units`.
+    pub fn probe_target_info(&mut self, units: &[Unit<'a>]) -> CargoResult<()> {
+        let mut crate_types = BTreeSet::new();
+        let mut visited_units = HashSet::new();
+        // pre-fill with `bin` for learning about tests (nothing may be
+        // explicitly `bin`) as well as `rlib` as it's the coalesced version of
+        // `lib` in the compiler and we're not sure which we'll see.
+        crate_types.insert("bin".to_string());
+        crate_types.insert("rlib".to_string());
+        for unit in units {
+            self.visit_crate_type(unit, &mut crate_types, &mut visited_units)?;
+        }
+        debug!("probe_target_info: crate_types={:?}", crate_types);
+        self.probe_target_info_kind(&crate_types, Kind::Target)?;
+        if self.requested_target().is_none() {
+            self.host_info = self.target_info.clone();
+        } else {
+            self.probe_target_info_kind(&crate_types, Kind::Host)?;
+        }
+        Ok(())
+    }
+
+    /// A recursive function that checks all crate types (`rlib`, ...) are in `crate_types`
+    /// for this unit and its dependencies.
+    ///
+    /// Tracks visited units to avoid unnecessary work.
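+    ///
+    /// (Illustrative note, not in the original source: a target declaring the
+    /// `lib` crate type is recorded here as `rlib`, since `rlib` is the
+    /// coalesced form that rustc reports for it.)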
+    fn visit_crate_type(&self,
+                        unit: &Unit<'a>,
+                        crate_types: &mut BTreeSet<String>,
+                        visited_units: &mut HashSet<Unit<'a>>)
+                        -> CargoResult<()> {
+        if !visited_units.insert(*unit) {
+            return Ok(());
+        }
+        for target in unit.pkg.manifest().targets() {
+            crate_types.extend(target.rustc_crate_types().iter().map(|s| {
+                if *s == "lib" {
+                    "rlib".to_string()
+                } else {
+                    s.to_string()
+                }
+            }));
+        }
+        for dep in self.dep_targets(unit)? {
+            self.visit_crate_type(&dep, crate_types, visited_units)?;
+        }
+        Ok(())
+    }
+
+    fn probe_target_info_kind(&mut self,
+                              crate_types: &BTreeSet<String>,
+                              kind: Kind)
+                              -> CargoResult<()> {
+        let rustflags = env_args(self.config,
+                                 &self.build_config,
+                                 self.info(&kind),
+                                 kind,
+                                 "RUSTFLAGS")?;
+        let mut process = self.config.rustc()?.process();
+        process.arg("-")
+               .arg("--crate-name").arg("___")
+               .arg("--print=file-names")
+               .args(&rustflags)
+               .env_remove("RUST_LOG");
+
+        if kind == Kind::Target {
+            process.arg("--target").arg(&self.target_triple());
+        }
+
+        let crate_type_process = process.clone();
+
+        for crate_type in crate_types {
+            process.arg("--crate-type").arg(crate_type);
+        }
+
+        let mut with_cfg = process.clone();
+        with_cfg.arg("--print=sysroot");
+        with_cfg.arg("--print=cfg");
+
+        let mut has_cfg_and_sysroot = true;
+        let output = with_cfg.exec_with_output().or_else(|_| {
+            has_cfg_and_sysroot = false;
+            process.exec_with_output()
+        }).chain_err(|| {
+            "failed to run `rustc` to learn about target-specific information"
+        })?;
+
+        let error = str::from_utf8(&output.stderr).unwrap();
+        let output = str::from_utf8(&output.stdout).unwrap();
+        let mut lines = output.lines();
+        let mut map = HashMap::new();
+        for crate_type in crate_types {
+            let out = parse_crate_type(crate_type, error, &mut lines)?;
+            map.insert(crate_type.to_string(), out);
+        }
+
+        if has_cfg_and_sysroot {
+            let line = match lines.next() {
+                Some(line) => line,
+                None => bail!("output of --print=sysroot missing when learning about \
+                               target-specific information from rustc"),
+            };
+            let mut rustlib = PathBuf::from(line);
+            if kind == Kind::Host {
+                if cfg!(windows) {
+                    rustlib.push("bin");
+                } else {
+                    rustlib.push("lib");
+                }
+                self.compilation.host_dylib_path = Some(rustlib);
+            } else {
+                rustlib.push("lib");
+                rustlib.push("rustlib");
+                rustlib.push(self.target_triple());
+                rustlib.push("lib");
+                self.compilation.target_dylib_path = Some(rustlib);
+            }
+        }
+
+        let cfg = if has_cfg_and_sysroot {
+            Some(try!(lines.map(Cfg::from_str).collect()))
+        } else {
+            None
+        };
+
+        let info = match kind {
+            Kind::Target => &mut self.target_info,
+            Kind::Host => &mut self.host_info,
+        };
+        info.crate_type_process = Some(crate_type_process);
+        info.crate_types = RefCell::new(map);
+        info.cfg = cfg;
+        Ok(())
+    }
+
+    /// Builds up the `used_in_plugin` internal to this context from the list of
+    /// top-level units.
+    ///
+    /// This will recursively walk `units` and all of their dependencies to
+    /// determine which crates are going to be used in plugins or not.
+    pub fn build_used_in_plugin_map(&mut self, units: &[Unit<'a>])
+                                    -> CargoResult<()> {
+        let mut visited = HashSet::new();
+        for unit in units {
+            self.walk_used_in_plugin_map(unit,
+                                         unit.target.for_host(),
+                                         &mut visited)?;
+        }
+        Ok(())
+    }
+
+    fn walk_used_in_plugin_map(&mut self,
+                               unit: &Unit<'a>,
+                               is_plugin: bool,
+                               visited: &mut HashSet<(Unit<'a>, bool)>)
+                               -> CargoResult<()> {
+        if !visited.insert((*unit, is_plugin)) {
+            return Ok(())
+        }
+        if is_plugin {
+            self.used_in_plugin.insert(*unit);
+        }
+        for unit in self.dep_targets(unit)? {
+            self.walk_used_in_plugin_map(&unit,
+                                         is_plugin || unit.target.for_host(),
+                                         visited)?;
+        }
+        Ok(())
+    }
+
+    /// Returns the appropriate directory layout for either a plugin or not.
+    fn layout(&self, kind: Kind) -> &Layout {
+        match kind {
+            Kind::Host => &self.host,
+            Kind::Target => self.target.as_ref().unwrap_or(&self.host)
+        }
+    }
+
+    /// Returns the directories where Rust crate dependencies are found for the
+    /// specified unit.
+    pub fn deps_dir(&self, unit: &Unit) -> &Path {
+        self.layout(unit.kind).deps()
+    }
+
+    /// Returns the directory for the specified unit where fingerprint
+    /// information is stored.
+    pub fn fingerprint_dir(&mut self, unit: &Unit<'a>) -> PathBuf {
+        let dir = self.pkg_dir(unit);
+        self.layout(unit.kind).fingerprint().join(dir)
+    }
+
+    /// Returns the directory where the compiled build script is placed.
+    pub fn build_script_dir(&mut self, unit: &Unit<'a>) -> PathBuf {
+        assert!(unit.target.is_custom_build());
+        assert!(!unit.profile.run_custom_build);
+        let dir = self.pkg_dir(unit);
+        self.layout(Kind::Host).build().join(dir)
+    }
+
+    /// Returns the directory where a run of the build script places its
+    /// output.
+    pub fn build_script_out_dir(&mut self, unit: &Unit<'a>) -> PathBuf {
+        assert!(unit.target.is_custom_build());
+        assert!(unit.profile.run_custom_build);
+        let dir = self.pkg_dir(unit);
+        self.layout(unit.kind).build().join(dir).join("out")
+    }
+
+    pub fn host_deps(&self) -> &Path {
+        self.host.deps()
+    }
+
+    /// Return the root of the build output tree
+    pub fn target_root(&self) -> &Path {
+        self.host.dest()
+    }
+
+    /// Returns the appropriate output directory for the specified package and
+    /// target.
+    pub fn out_dir(&mut self, unit: &Unit<'a>) -> PathBuf {
+        if unit.profile.doc {
+            self.layout(unit.kind).root().parent().unwrap().join("doc")
+        } else if unit.target.is_custom_build() {
+            self.build_script_dir(unit)
+        } else if unit.target.is_example() {
+            self.layout(unit.kind).examples().to_path_buf()
+        } else {
+            self.deps_dir(unit).to_path_buf()
+        }
+    }
+
+    fn pkg_dir(&mut self, unit: &Unit<'a>) -> String {
+        let name = unit.pkg.package_id().name();
+        match self.target_metadata(unit) {
+            Some(meta) => format!("{}-{}", name, meta),
+            None => format!("{}-{}", name, self.target_short_hash(unit)),
+        }
+    }
+
+    /// Return the host triple for this context
+    pub fn host_triple(&self) -> &str {
+        &self.build_config.host_triple
+    }
+
+    /// Return the target triple which this context is targeting.
+    pub fn target_triple(&self) -> &str {
+        self.requested_target().unwrap_or(self.host_triple())
+    }
+
+    /// Requested (not actual) target for the build
+    pub fn requested_target(&self) -> Option<&str> {
+        self.build_config.requested_target.as_ref().map(|s| &s[..])
+    }
+
+    /// Get the short hash based only on the PackageId.
+    /// Used for the metadata when target_metadata returns None.
+    pub fn target_short_hash(&self, unit: &Unit) -> String {
+        let hashable = unit.pkg.package_id().stable_hash(self.ws.root());
+        util::short_hash(&hashable)
+    }
+
+    /// Get the metadata for a target in a specific profile.
+    /// We build to the path: "{filename}-{target_metadata}".
+    /// We use a linking step to link/copy to a predictable filename
+    /// like `target/debug/libfoo.{a,so,rlib}` and such.
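+    ///
+    /// (Illustrative example, not from the original source: a crate `foo`
+    /// might be compiled as `target/debug/deps/libfoo-0f2bd39f5e18e545.rlib`
+    /// and then hard-linked to `target/debug/libfoo.rlib`; the hash shown is
+    /// made up.)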
+    pub fn target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> {
+        if let Some(cache) = self.target_metadatas.get(unit) {
+            return cache.clone()
+        }
+
+        let metadata = self.calc_target_metadata(unit);
+        self.target_metadatas.insert(*unit, metadata.clone());
+        metadata
+    }
+
+    fn calc_target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> {
+        // No metadata for dylibs because of a couple issues
+        // - OSX encodes the dylib name in the executable
+        // - Windows rustc emits multiple files, of which we can't easily link all of them
+        //
+        // No metadata for bin because of an issue
+        // - wasm32 rustc/emcc encodes the .wasm name in the .js (rust-lang/cargo#4535)
+        //
+        // Two exceptions
+        // 1) Upstream dependencies (we aren't exporting + need to resolve name conflict)
+        // 2) __CARGO_DEFAULT_LIB_METADATA env var
+        //
+        // Note, though, that the compiler's build system at least wants
+        // path dependencies (eg libstd) to have hashes in filenames. To account for
+        // that we have an extra hack here which reads the
+        // `__CARGO_DEFAULT_LIB_METADATA` environment variable and creates a
+        // hash in the filename if that's present.
+        //
+        // This environment variable should not be relied on! It's
+        // just here for rustbuild. We need a more principled method of
+        // doing this eventually.
+        let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
+        if !unit.profile.test &&
+            (unit.target.is_dylib() || unit.target.is_cdylib() ||
+             (unit.target.is_bin() && self.target_triple().starts_with("wasm32-"))) &&
+            unit.pkg.package_id().source_id().is_path() &&
+            !__cargo_default_lib_metadata.is_ok()
+        {
+            return None;
+        }
+
+        let mut hasher = SipHasher::new_with_keys(0, 0);
+
+        // Unique metadata per (name, source, version) triple. This'll allow us
+        // to pull crates from anywhere w/o worrying about conflicts
+        unit.pkg.package_id().stable_hash(self.ws.root()).hash(&mut hasher);
+
+        // Add package properties which map to environment variables
+        // exposed by Cargo
+        let manifest_metadata = unit.pkg.manifest().metadata();
+        manifest_metadata.authors.hash(&mut hasher);
+        manifest_metadata.description.hash(&mut hasher);
+        manifest_metadata.homepage.hash(&mut hasher);
+
+        // Also mix in enabled features to our metadata. This'll ensure that
+        // when changing feature sets each lib is separately cached.
+        self.resolve.features_sorted(unit.pkg.package_id()).hash(&mut hasher);
+
+        // Mix in the target-metadata of all the dependencies of this target
+        if let Ok(deps) = self.dep_targets(unit) {
+            let mut deps_metadata = deps.into_iter().map(|dep_unit| {
+                self.target_metadata(&dep_unit)
+            }).collect::<Vec<_>>();
+            deps_metadata.sort();
+            deps_metadata.hash(&mut hasher);
+        }
+
+        // Throw in the profile we're compiling with. This helps caching
+        // panic=abort and panic=unwind artifacts, additionally with various
+        // settings like debuginfo and whatnot.
+        unit.profile.hash(&mut hasher);
+
+        // Artifacts compiled for the host should have a different metadata
+        // piece than those compiled for the target, so make sure we throw in
+        // the unit's `kind` as well
+        unit.kind.hash(&mut hasher);
+
+        // Finally throw in the target name/kind. This ensures that concurrent
+        // compiles of targets in the same crate don't collide.
+        unit.target.name().hash(&mut hasher);
+        unit.target.kind().hash(&mut hasher);
+
+        if let Ok(rustc) = self.config.rustc() {
+            rustc.verbose_version.hash(&mut hasher);
+        }
+
+        // Seed the contents of __CARGO_DEFAULT_LIB_METADATA to the hasher if present.
+        // This should be the release channel, to get a different hash for each channel.
+        if let Ok(ref channel) = __cargo_default_lib_metadata {
+            channel.hash(&mut hasher);
+        }
+
+        Some(Metadata(hasher.finish()))
+    }
+
+    /// Returns the file stem for a given target/profile combo (with metadata)
+    pub fn file_stem(&mut self, unit: &Unit<'a>) -> String {
+        match self.target_metadata(unit) {
+            Some(ref metadata) => format!("{}-{}", unit.target.crate_name(),
+                                          metadata),
+            None => self.bin_stem(unit),
+        }
+    }
+
+    /// Returns the bin stem for a given target (without metadata)
+    fn bin_stem(&self, unit: &Unit) -> String {
+        if unit.target.allows_underscores() {
+            unit.target.name().to_string()
+        } else {
+            unit.target.crate_name()
+        }
+    }
+
+    /// Returns a tuple with the directory and name of the hard link we expect
+    /// our target to be copied to. E.g., file_stem may be out_dir/deps/foo-abcdef
+    /// and link_stem would be out_dir/foo.
+    /// This function returns it in two parts so the caller can add prefix/suffix
+    /// to the filename separately.
+    ///
+    /// Returns an Option because in some cases we don't want to link
+    /// (e.g. a dependent lib).
+    pub fn link_stem(&mut self, unit: &Unit<'a>) -> Option<(PathBuf, String)> {
+        let src_dir = self.out_dir(unit);
+        let bin_stem = self.bin_stem(unit);
+        let file_stem = self.file_stem(unit);
+
+        // We currently only lift files up from the `deps` directory. If
+        // it was compiled into something like `example/` or `doc/` then
+        // we don't want to link it up.
+        if src_dir.ends_with("deps") {
+            // Don't lift up library dependencies
+            if self.ws.members().find(|&p| p == unit.pkg).is_none() &&
+               !unit.target.is_bin() {
+                None
+            } else {
+                Some((
+                    src_dir.parent().unwrap().to_owned(),
+                    if unit.profile.test {file_stem} else {bin_stem},
+                ))
+            }
+        } else if bin_stem == file_stem {
+            None
+        } else if src_dir.ends_with("examples")
+               || src_dir.parent().unwrap().ends_with("build") {
+            Some((src_dir, bin_stem))
+        } else {
+            None
+        }
+    }
+
+    /// Return the filenames that the given target for the given profile will
+    /// generate as a list of 3-tuples (filename, link_dst, linkable)
+    ///
+    /// - filename: filename rustc compiles to. (Often has metadata suffix).
+    /// - link_dst: Optional file to link/copy the result to (without metadata suffix)
+    /// - linkable: Whether possible to link against file (e.g. it's a library)
+    pub fn target_filenames(&mut self, unit: &Unit<'a>)
+                            -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
+        if let Some(cache) = self.target_filenames.get(unit) {
+            return Ok(Arc::clone(cache))
+        }
+
+        let result = self.calc_target_filenames(unit);
+        if let Ok(ref ret) = result {
+            self.target_filenames.insert(*unit, Arc::clone(ret));
+        }
+        result
+    }
+
+    fn calc_target_filenames(&mut self, unit: &Unit<'a>)
+                             -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
+        let out_dir = self.out_dir(unit);
+        let stem = self.file_stem(unit);
+        let link_stem = self.link_stem(unit);
+        let info = if unit.target.for_host() {
+            &self.host_info
+        } else {
+            &self.target_info
+        };
+
+        let mut ret = Vec::new();
+        let mut unsupported = Vec::new();
+        {
+            if unit.profile.check {
+                let filename = out_dir.join(format!("lib{}.rmeta", stem));
+                let link_dst = link_stem.clone().map(|(ld, ls)| {
+                    ld.join(format!("lib{}.rmeta", ls))
+                });
+                ret.push((filename, link_dst, TargetFileType::Linkable));
+            } else {
+                let mut add = |crate_type: &str, file_type: TargetFileType| -> CargoResult<()> {
+                    let crate_type = if crate_type == "lib" {"rlib"} else {crate_type};
+                    let mut crate_types = info.crate_types.borrow_mut();
+                    let entry = crate_types.entry(crate_type.to_string());
+                    let crate_type_info = match entry {
+                        Entry::Occupied(o) => &*o.into_mut(),
+                        Entry::Vacant(v) => {
+                            let value = info.discover_crate_type(v.key())?;
+                            &*v.insert(value)
+                        }
+                    };
+                    match *crate_type_info {
+                        Some((ref prefix, ref suffix)) => {
+                            let suffixes = add_target_specific_suffixes(
+                                &self.target_triple(),
+                                &crate_type,
+                                unit.target.kind(),
+                                suffix,
+                                file_type,
+                            );
+                            for (suffix, file_type, should_replace_hyphens) in suffixes {
+                                // wasm bin target will generate two files in deps such as
+                                // "web-stuff.js" and "web_stuff.wasm". Note the different usages of
+                                // "-" and "_". should_replace_hyphens is a flag to indicate that
+                                // we need to convert the stem "web-stuff" to "web_stuff", so we
+                                // won't miss "web_stuff.wasm".
+                                let conv = |s: String| if should_replace_hyphens {
+                                    s.replace("-", "_")
+                                } else {
+                                    s
+                                };
+                                let filename =
+                                    out_dir.join(format!("{}{}{}", prefix, conv(stem.clone()), suffix));
+                                let link_dst = link_stem.clone().map(|(ld, ls)| {
+                                    ld.join(format!("{}{}{}", prefix, conv(ls), suffix))
+                                });
+                                ret.push((filename, link_dst, file_type));
+                            }
+                            Ok(())
+                        }
+                        // not supported, don't worry about it
+                        None => {
+                            unsupported.push(crate_type.to_string());
+                            Ok(())
+                        }
+                    }
+                };
+                //info!("{:?}", unit);
+                match *unit.target.kind() {
+                    TargetKind::Bin |
+                    TargetKind::CustomBuild |
+                    TargetKind::ExampleBin |
+                    TargetKind::Bench |
+                    TargetKind::Test => {
+                        add("bin", TargetFileType::Normal)?;
+                    }
+                    TargetKind::Lib(..) |
+                    TargetKind::ExampleLib(..)
+                    if unit.profile.test => {
+                        add("bin", TargetFileType::Normal)?;
+                    }
+                    TargetKind::ExampleLib(ref kinds) |
+                    TargetKind::Lib(ref kinds) => {
+                        for kind in kinds {
+                            add(kind.crate_type(), if kind.linkable() {
+                                TargetFileType::Linkable
+                            } else {
+                                TargetFileType::Normal
+                            })?;
+                        }
+                    }
+                }
+            }
+        }
+        if ret.is_empty() {
+            if !unsupported.is_empty() {
+                bail!("cannot produce {} for `{}` as the target `{}` \
+                       does not support these crate types",
+                      unsupported.join(", "), unit.pkg, self.target_triple())
+            }
+            bail!("cannot compile `{}` as the target `{}` does not \
+                   support any of the output crate types",
+                  unit.pkg, self.target_triple());
+        }
+        info!("Target filenames: {:?}", ret);
+
+        Ok(Arc::new(ret))
+    }
+
+    /// For a package, return all targets which are registered as dependencies
+    /// for that package.
+    pub fn dep_targets(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
+        if unit.profile.run_custom_build {
+            return self.dep_run_custom_build(unit)
+        } else if unit.profile.doc && !unit.profile.test {
+            return self.doc_deps(unit);
+        }
+
+        let id = unit.pkg.package_id();
+        let deps = self.resolve.deps(id);
+        let mut ret = deps.filter(|dep| {
+            unit.pkg.dependencies().iter().filter(|d| {
+                d.name() == dep.name() && d.version_req().matches(dep.version())
+            }).any(|d| {
+                // If this target is a build command, then we only want build
+                // dependencies, otherwise we want everything *other than* build
+                // dependencies.
+                if unit.target.is_custom_build() != d.is_build() {
+                    return false
+                }
+
+                // If this dependency is *not* a transitive dependency, then it
+                // only applies to test/example targets
+                if !d.is_transitive() && !unit.target.is_test() &&
+                   !unit.target.is_example() && !unit.profile.test {
+                    return false
+                }
+
+                // If this dependency is only available for certain platforms,
+                // make sure we're only enabling it for that platform.
+                if !self.dep_platform_activated(d, unit.kind) {
+                    return false
+                }
+
+                // If the dependency is optional, then we're only activating it
+                // if the corresponding feature was activated
+                if d.is_optional() && !self.resolve.features(id).contains(d.name()) {
+                    return false;
+                }
+
+                // If we've gotten past all that, then this dependency is
+                // actually used!
+                true
+            })
+        }).filter_map(|id| {
+            match self.get_package(id) {
+                Ok(pkg) => {
+                    pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
+                        let unit = Unit {
+                            pkg: pkg,
+                            target: t,
+                            profile: self.lib_or_check_profile(unit, t),
+                            kind: unit.kind.for_target(t),
+                        };
+                        Ok(unit)
+                    })
+                }
+                Err(e) => Some(Err(e))
+            }
+        }).collect::<CargoResult<Vec<_>>>()?;
+
+        // If this target is a build script, then what we've collected so far is
+        // all we need. If this isn't a build script, then it depends on the
+        // build script if there is one.
+        if unit.target.is_custom_build() {
+            return Ok(ret)
+        }
+        ret.extend(self.dep_build_script(unit));
+
+        // If this target is a binary, test, example, etc, then it depends on
+        // the library of the same package. The call to `resolve.deps` above
+        // didn't include `pkg` in the return values, so we need to special case
+        // it here and see if we need to push `(pkg, pkg_lib_target)`.
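+        // (Illustrative note, not in the original source: e.g. a test target
+        // in package `foo` links against `foo`'s own library target, which
+        // `resolve.deps` does not report.)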
+        if unit.target.is_lib() && !unit.profile.doc {
+            return Ok(ret)
+        }
+        ret.extend(self.maybe_lib(unit));
+
+        // Integration tests/benchmarks require binaries to be built
+        if unit.profile.test &&
+           (unit.target.is_test() || unit.target.is_bench()) {
+            ret.extend(unit.pkg.targets().iter().filter(|t| {
+                let no_required_features = Vec::new();
+
+                t.is_bin() &&
+                    // Skip binaries with required features that have not been selected.
+                    t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
+                        self.resolve.features(id).contains(f)
+                    })
+            }).map(|t| {
+                Unit {
+                    pkg: unit.pkg,
+                    target: t,
+                    profile: self.lib_profile(),
+                    kind: unit.kind.for_target(t),
+                }
+            }));
+        }
+        Ok(ret)
+    }
+
+    /// Returns the dependencies needed to run a build script.
+    ///
+    /// The `unit` provided must represent an execution of a build script, and
+    /// the returned set of units must all be run before `unit` is run.
+    pub fn dep_run_custom_build(&self, unit: &Unit<'a>)
+                                -> CargoResult<Vec<Unit<'a>>> {
+        // If this build script's execution has been overridden then we don't
+        // actually depend on anything, we've reached the end of the dependency
+        // chain as we've got all the info we're gonna get.
+        let key = (unit.pkg.package_id().clone(), unit.kind);
+        if self.build_script_overridden.contains(&key) {
+            return Ok(Vec::new())
+        }
+
+        // When not overridden, then the dependencies to run a build script are:
+        //
+        // 1. Compiling the build script itself
+        // 2. For each immediate dependency of our package which has a `links`
+        //    key, the execution of that build script.
+        let not_custom_build = unit.pkg.targets().iter().find(|t| {
+            !t.is_custom_build()
+        }).unwrap();
+        let tmp = Unit {
+            target: not_custom_build,
+            profile: &self.profiles.dev,
+            ..*unit
+        };
+        let deps = self.dep_targets(&tmp)?;
+        Ok(deps.iter().filter_map(|unit| {
+            if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
+                return None
+            }
+            self.dep_build_script(unit)
+        }).chain(Some(Unit {
+            profile: self.build_script_profile(unit.pkg.package_id()),
+            kind: Kind::Host, // build scripts always compiled for the host
+            ..*unit
+        })).collect())
+    }
+
+    /// Returns the dependencies necessary to document a package
+    fn doc_deps(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
+        let deps = self.resolve.deps(unit.pkg.package_id()).filter(|dep| {
+            unit.pkg.dependencies().iter().filter(|d| {
+                d.name() == dep.name()
+            }).any(|dep| {
+                match dep.kind() {
+                    DepKind::Normal => self.dep_platform_activated(dep,
+                                                                   unit.kind),
+                    _ => false,
+                }
+            })
+        }).map(|dep| {
+            self.get_package(dep)
+        });
+
+        // To document a library, we depend on dependencies actually being
+        // built. If we're documenting *all* libraries, then we also depend on
+        // the documentation of the library being built.
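+        // (Illustrative note, not in the original source: documenting `foo`
+        // requires building its dependency `bar`; when "doc all" is enabled,
+        // a second unit generating `bar`'s own documentation is pushed too.)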
+        let mut ret = Vec::new();
+        for dep in deps {
+            let dep = dep?;
+            let lib = match dep.targets().iter().find(|t| t.is_lib()) {
+                Some(lib) => lib,
+                None => continue,
+            };
+            ret.push(Unit {
+                pkg: dep,
+                target: lib,
+                profile: self.lib_profile(),
+                kind: unit.kind.for_target(lib),
+            });
+            if self.build_config.doc_all {
+                ret.push(Unit {
+                    pkg: dep,
+                    target: lib,
+                    profile: &self.profiles.doc,
+                    kind: unit.kind.for_target(lib),
+                });
+            }
+        }
+
+        // Be sure to build/run the build script for documented libraries as well
+        ret.extend(self.dep_build_script(unit));
+
+        // If we document a binary, we need the library available
+        if unit.target.is_bin() {
+            ret.extend(self.maybe_lib(unit));
+        }
+        Ok(ret)
+    }
+
+    /// If a build script is scheduled to be run for the package specified by
+    /// `unit`, this function will return the unit to run that build script.
+    ///
+    /// Overriding a build script simply means that the running of the build
+    /// script itself doesn't have any dependencies, so even in that case a unit
+    /// of work is still returned. `None` is only returned if the package has no
+    /// build script.
+    fn dep_build_script(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
+        unit.pkg.targets().iter().find(|t| t.is_custom_build()).map(|t| {
+            Unit {
+                pkg: unit.pkg,
+                target: t,
+                profile: &self.profiles.custom_build,
+                kind: unit.kind,
+            }
+        })
+    }
+
+    fn maybe_lib(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
+        unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
+            Unit {
+                pkg: unit.pkg,
+                target: t,
+                profile: self.lib_or_check_profile(unit, t),
+                kind: unit.kind.for_target(t),
+            }
+        })
+    }
+
+    fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {
+        // If this dependency is only available for certain platforms,
+        // make sure we're only enabling it for that platform.
+        let platform = match dep.platform() {
+            Some(p) => p,
+            None => return true,
+        };
+        let (name, info) = match kind {
+            Kind::Host => (self.host_triple(), &self.host_info),
+            Kind::Target => (self.target_triple(), &self.target_info),
+        };
+        platform.matches(name, info.cfg.as_ref().map(|cfg| &cfg[..]))
+    }
+
+    /// Gets a package for the given package id.
+    pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> {
+        self.packages.get(id)
+    }
+
+    /// Get the user-specified linker for a particular host or target
+    pub fn linker(&self, kind: Kind) -> Option<&Path> {
+        self.target_config(kind).linker.as_ref().map(|s| s.as_ref())
+    }
+
+    /// Get the user-specified `ar` program for a particular host or target
+    pub fn ar(&self, kind: Kind) -> Option<&Path> {
+        self.target_config(kind).ar.as_ref().map(|s| s.as_ref())
+    }
+
+    /// Get the list of cfg printed out from the compiler for the specified kind
+    pub fn cfg(&self, kind: Kind) -> &[Cfg] {
+        let info = match kind {
+            Kind::Host => &self.host_info,
+            Kind::Target => &self.target_info,
+        };
+        info.cfg.as_ref().map(|s| &s[..]).unwrap_or(&[])
+    }
+
+    /// Get the target configuration for a particular host or target
+    fn target_config(&self, kind: Kind) -> &TargetConfig {
+        match kind {
+            Kind::Host => &self.build_config.host,
+            Kind::Target => &self.build_config.target,
+        }
+    }
+
+    /// Number of jobs specified for this build
+    pub fn jobs(&self) -> u32 { self.build_config.jobs }
+
+    pub fn lib_profile(&self) -> &'a Profile {
+        let (normal, test) = if self.build_config.release {
+            (&self.profiles.release, &self.profiles.bench_deps)
+        } else {
+            (&self.profiles.dev, &self.profiles.test_deps)
+        };
+        if self.build_config.test {
+            test
+        } else {
+            normal
+        }
+    }
+
+    pub fn lib_or_check_profile(&self, unit: &Unit, target: &Target) -> &'a Profile {
+        if unit.profile.check && !target.is_custom_build() && !target.for_host() {
+            &self.profiles.check
+        } else {
+            self.lib_profile()
+        }
+    }
+
+    pub fn build_script_profile(&self, _pkg: &PackageId) -> &'a Profile {
+        // TODO: should build scripts always be built with the same library
+        // profile? How is this controlled at the CLI layer?
+        self.lib_profile()
+    }
+
+    pub fn incremental_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+        if self.incremental_enabled {
+            if unit.pkg.package_id().source_id().is_path() {
+                // Only enable incremental compilation for sources the user can modify.
+                // For things that change infrequently, non-incremental builds yield
+                // better performance.
+                // (see also https://github.com/rust-lang/cargo/issues/3972)
+                return Ok(vec![format!("-Zincremental={}",
+                                       self.layout(unit.kind).incremental().display())]);
+            } else if unit.profile.codegen_units.is_none() {
+                // For non-incremental builds we set a higher number of
+                // codegen units so we get faster compiles. It's OK to do
+                // so because the user has already opted into slower
+                // runtime code by setting CARGO_INCREMENTAL.
+                return Ok(vec![format!("-Ccodegen-units={}", ::num_cpus::get())]);
+            }
+        }
+
+        Ok(vec![])
+    }
+
+    pub fn rustflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+        env_args(self.config, &self.build_config, self.info(&unit.kind), unit.kind, "RUSTFLAGS")
+    }
+
+    pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+        env_args(self.config, &self.build_config, self.info(&unit.kind), unit.kind, "RUSTDOCFLAGS")
+    }
+
+    pub fn show_warnings(&self, pkg: &PackageId) -> bool {
+        pkg.source_id().is_path() || self.config.extra_verbose()
+    }
+
+    fn info(&self, kind: &Kind) -> &TargetInfo {
+        match *kind {
+            Kind::Host => &self.host_info,
+            Kind::Target => &self.target_info,
+        }
+    }
+}
+
+/// Acquire extra flags to pass to the compiler from various locations.
+///
+/// The locations are:
+///
+/// - the `RUSTFLAGS` environment variable
+///
+/// then if this was not found
+///
+/// - `target.*.rustflags` from the config (`.cargo/config`)
+/// - `target.cfg(..).rustflags` from the config
+///
+/// then if neither of these were found
+///
+/// - `build.rustflags` from the config
+///
+/// Note that if a `target` is specified, no args will be passed to host code (plugins, build
+/// scripts, ...), even if it is the same as the target.
+fn env_args(config: &Config,
+            build_config: &BuildConfig,
+            target_info: &TargetInfo,
+            kind: Kind,
+            name: &str) -> CargoResult<Vec<String>> {
+    // We *want* to apply RUSTFLAGS only to builds for the
+    // requested target architecture, and not to things like build
+    // scripts and plugins, which may be for an entirely different
+    // architecture. Cargo's present architecture makes it quite
+    // hard to only apply flags to things that are not build
+    // scripts and plugins though, so we do something more hacky
+    // instead to avoid applying the same RUSTFLAGS to multiple target
+    // arches:
+    //
+    // 1) If --target is not specified we just apply RUSTFLAGS to
+    // all builds; they are all going to have the same target.
+    //
+    // 2) If --target *is* specified then we only apply RUSTFLAGS
+    // to compilation units with the Target kind, which indicates
+    // it was chosen by the --target flag.
+    //
+    // This means that, e.g. even if the specified --target is the
+    // same as the host, build scripts and plugins won't get
+    // RUSTFLAGS.
+    let compiling_with_target = build_config.requested_target.is_some();
+    let is_target_kind = kind == Kind::Target;
+
+    if compiling_with_target && !is_target_kind {
+        // This is probably a build script or plugin and we're
+        // compiling with --target. In this scenario there are
+        // no rustflags we can apply.
+        return Ok(Vec::new());
+    }
+
+    // First try RUSTFLAGS from the environment
+    if let Ok(a) = env::var(name) {
+        let args = a.split(' ')
+                    .map(str::trim)
+                    .filter(|s| !s.is_empty())
+                    .map(str::to_string);
+        return Ok(args.collect());
+    }
+
+    let mut rustflags = Vec::new();
+
+    let name = name.chars().flat_map(|c| c.to_lowercase()).collect::<String>();
+    // Then the target.*.rustflags value...
+    let target = build_config.requested_target.as_ref().unwrap_or(&build_config.host_triple);
+    let key = format!("target.{}.{}", target, name);
+    if let Some(args) = config.get_list_or_split_string(&key)? {
+        let args = args.val.into_iter();
+        rustflags.extend(args);
+    }
+    // ...including target.'cfg(...)'.rustflags
+    if let Some(ref target_cfg) = target_info.cfg {
+        if let Some(table) = config.get_table("target")? {
+            let cfgs = table.val.keys().filter_map(|t| {
+                if t.starts_with("cfg(") && t.ends_with(')') {
+                    let cfg = &t[4..t.len() - 1];
+                    CfgExpr::from_str(cfg)
+                        .ok()
+                        .and_then(|c| if c.matches(target_cfg) { Some(t) } else { None })
+                } else {
+                    None
+                }
+            });
+            for n in cfgs {
+                let key = format!("target.{}.{}", n, name);
+                if let Some(args) = config.get_list_or_split_string(&key)? {
+                    let args = args.val.into_iter();
+                    rustflags.extend(args);
+                }
+            }
+        }
+    }
+
+    if !rustflags.is_empty() {
+        return Ok(rustflags);
+    }
+
+    // Then the build.rustflags value
+    let key = format!("build.{}", name);
+    if let Some(args) = config.get_list_or_split_string(&key)? {
+        let args = args.val.into_iter();
+        return Ok(args.collect());
+    }
+
+    Ok(Vec::new())
+}
+
+impl fmt::Display for Metadata {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{:016x}", self.0)
+    }
+}
+
+/// Takes rustc output (using specialized command line args), and calculates the file prefix and
+/// suffix for the given crate type, or returns None if the type is not supported. (e.g. for a
+/// rust library like libcargo.rlib, prefix = "lib", suffix = "rlib").
+///
+/// The caller needs to ensure that the lines object is at the correct line for the given crate
+/// type: this is not checked.
+// This function can not handle more than 1 file per type (with wasm32-unknown-emscripten, there
+// are 2 files for bin (.wasm and .js))
+fn parse_crate_type(
+    crate_type: &str,
+    error: &str,
+    lines: &mut str::Lines,
+) -> CargoResult<Option<(String, String)>> {
+    let not_supported = error.lines().any(|line| {
+        (line.contains("unsupported crate type") ||
+         line.contains("unknown crate type")) &&
+        line.contains(crate_type)
+    });
+    if not_supported {
+        return Ok(None);
+    }
+    let line = match lines.next() {
+        Some(line) => line,
+        None => bail!("malformed output when learning about \
+                       crate-type {} information", crate_type),
+    };
+    let mut parts = line.trim().split("___");
+    let prefix = parts.next().unwrap();
+    let suffix = match parts.next() {
+        Some(part) => part,
+        None => bail!("output of --print=file-names has changed in \
+                       the compiler, cannot parse"),
+    };
+
+    Ok(Some((prefix.to_string(), suffix.to_string())))
+}
+
+// (deliberately not a rustdoc comment)
+// Returns a list of 3-tuples (suffix, file_type, should_replace_hyphens).
+//
+// should_replace_hyphens will be used by the caller to replace "-" with "_"
+// in a bin_stem. See the caller side (calc_target_filenames()) for details.
+fn add_target_specific_suffixes(
+    target_triple: &str,
+    crate_type: &str,
+    target_kind: &TargetKind,
+    suffix: &str,
+    file_type: TargetFileType,
+) -> Vec<(String, TargetFileType, bool)> {
+    let mut ret = vec![(suffix.to_string(), file_type, false)];
+
+    // rust-lang/cargo#4500
+    if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib") &&
+       suffix == ".dll"
+    {
+        ret.push((".dll.lib".to_string(), TargetFileType::Normal, false));
+    }
+
+    // rust-lang/cargo#4535
+    if target_triple.starts_with("wasm32-") && crate_type == "bin" &&
+       suffix == ".js"
+    {
+        ret.push((".wasm".to_string(), TargetFileType::Normal, true));
+    }
+
+    // rust-lang/cargo#4490
+    // - only uplift *.dSYM for binaries.
+    //   tests are run directly from target/debug/deps/
+    //   and examples are inside target/debug/examples/ which already have *.dSYM next to them,
+    //   so no need to do anything.
+    if target_triple.contains("-apple-") && *target_kind == TargetKind::Bin {
+        ret.push((".dSYM".to_string(), TargetFileType::DebugInfo, false));
+    }
+
+    ret
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/custom_build.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/custom_build.rs
new file mode 100644
index 000000000..ee51b9b3b
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/custom_build.rs
@@ -0,0 +1,538 @@
+use std::collections::{HashMap, BTreeSet, HashSet};
+use std::fs;
+use std::path::{PathBuf, Path};
+use std::str;
+use std::sync::{Mutex, Arc};
+
+use core::PackageId;
+use util::{Freshness, Cfg};
+use util::errors::{CargoResult, CargoResultExt, CargoError};
+use util::{internal, profile, paths};
+use util::machine_message;
+
+use super::job::Work;
+use super::{fingerprint, Kind, Context, Unit};
+
+/// Contains the parsed output of a custom build script.
+#[derive(Clone, Debug, Hash)]
+pub struct BuildOutput {
+    /// Paths to pass to rustc with the `-L` flag
+    pub library_paths: Vec<PathBuf>,
+    /// Names and link kinds of libraries, suitable for the `-l` flag
+    pub library_links: Vec<String>,
+    /// Various `--cfg` flags to pass to the compiler
+    pub cfgs: Vec<String>,
+    /// Additional environment variables to run the compiler with.
+    pub env: Vec<(String, String)>,
+    /// Metadata to pass to the immediate dependencies
+    pub metadata: Vec<(String, String)>,
+    /// Paths to trigger a rerun of this build script.
+    pub rerun_if_changed: Vec<String>,
+    /// Environment variables which, when changed, will cause a rebuild.
+    pub rerun_if_env_changed: Vec<String>,
+    /// Warnings generated by this build script.
+    pub warnings: Vec<String>,
+}
+
+/// Map of packages to build info
+pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
+
+/// Build info and overrides
+pub struct BuildState {
+    pub outputs: Mutex<BuildMap>,
+    overrides: HashMap<(String, Kind), BuildOutput>,
+}
+
+#[derive(Default)]
+pub struct BuildScripts {
+    // Cargo will use this `to_link` vector to add -L flags to compiles as we
+    // propagate them upwards towards the final build. Note, however, that we
+    // need to preserve the ordering of `to_link` to be topologically sorted.
+    // This will ensure that build scripts which print their paths properly will
+    // correctly pick up the files they generated (if there are duplicates
+    // elsewhere).
+    //
+    // To preserve this ordering, the (id, kind) is stored in two places, once
+    // in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
+    // this as we're building interactively below to ensure that the memory
+    // usage here doesn't blow up too much.
+    //
+    // For more information, see #2354.
+    pub to_link: Vec<(PackageId, Kind)>,
+    seen_to_link: HashSet<(PackageId, Kind)>,
+    pub plugins: BTreeSet<PackageId>,
+}
+
+pub struct BuildDeps {
+    pub build_script_output: PathBuf,
+    pub rerun_if_changed: Vec<String>,
+    pub rerun_if_env_changed: Vec<String>,
+}
+
+/// Prepares a `Work` that executes the target as a custom build script.
+///
+/// The `req` given is the requirement which this run of the build script will
+/// prepare work for. If the requirement is specified as both the target and the
+/// host platforms it is assumed that the two are equal and the build script is
+/// only run once (not twice).
+pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
+                         -> CargoResult<(Work, Work, Freshness)> {
+    let _p = profile::start(format!("build script prepare: {}/{}",
+                                    unit.pkg, unit.target.name()));
+
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+    let overridden = cx.build_script_overridden.contains(&key);
+    let (work_dirty, work_fresh) = if overridden {
+        (Work::noop(), Work::noop())
+    } else {
+        build_work(cx, unit)?
+    };
+
+    // Now that we've prep'd our work, build the work needed to manage the
+    // fingerprint and then start returning that upwards.
+    let (freshness, dirty, fresh) =
+        fingerprint::prepare_build_cmd(cx, unit)?;
+
+    Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
+}
+
+fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
+                        -> CargoResult<(Work, Work)> {
+    let dependencies = cx.dep_run_custom_build(unit)?;
+    let build_script_unit = dependencies.iter().find(|d| {
+        !d.profile.run_custom_build && d.target.is_custom_build()
+    }).expect("running a script not depending on an actual script");
+    let script_output = cx.build_script_dir(build_script_unit);
+    let build_output = cx.build_script_out_dir(unit);
+
+    // Building the command to execute
+    let to_exec = script_output.join(unit.target.name());
+
+    // Start preparing the process to execute, starting out with some
+    // environment variables. Note that the profile-related environment
+    // variables are not set to the build script's profile but rather the
+    // package's library profile.
+    let profile = cx.lib_profile();
+    let to_exec = to_exec.into_os_string();
+    let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
+    cmd.env("OUT_DIR", &build_output)
+       .env("CARGO_MANIFEST_DIR", unit.pkg.root())
+       .env("NUM_JOBS", &cx.jobs().to_string())
+       .env("TARGET", &match unit.kind {
+           Kind::Host => cx.host_triple(),
+           Kind::Target => cx.target_triple(),
+       })
+       .env("DEBUG", &profile.debuginfo.is_some().to_string())
+       .env("OPT_LEVEL", &profile.opt_level)
+       .env("PROFILE", if cx.build_config.release { "release" } else { "debug" })
+       .env("HOST", cx.host_triple())
+       .env("RUSTC", &cx.config.rustc()?.path)
+       .env("RUSTDOC", &*cx.config.rustdoc()?)
+       .inherit_jobserver(&cx.jobserver);
+
+    if let Some(links) = unit.pkg.manifest().links() {
+        cmd.env("CARGO_MANIFEST_LINKS", links);
+    }
+
+    // Be sure to pass along all enabled features for this package, this is the
+    // last piece of statically known information that we have.
+    for feat in cx.resolve.features(unit.pkg.package_id()).iter() {
+        cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
+    }
+
+    let mut cfg_map = HashMap::new();
+    for cfg in cx.cfg(unit.kind) {
+        match *cfg {
+            Cfg::Name(ref n) => { cfg_map.insert(n.clone(), None); }
+            Cfg::KeyPair(ref k, ref v) => {
+                match *cfg_map.entry(k.clone()).or_insert(Some(Vec::new())) {
+                    Some(ref mut values) => values.push(v.clone()),
+                    None => { /* ... */ }
+                }
+            }
+        }
+    }
+    for (k, v) in cfg_map {
+        let k = format!("CARGO_CFG_{}", super::envify(&k));
+        match v {
+            Some(list) => { cmd.env(&k, list.join(",")); }
+            None => { cmd.env(&k, ""); }
+        }
+    }
+
+    // Gather the set of native dependencies that this package has along with
+    // some other variables to close over.
+    //
+    // This information will be used at build-time later on to figure out which
+    // sorts of variables need to be discovered at that time.
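+    //
+    // (Illustrative example, not in the original source: if a dependency
+    // declares `links = "foo"` and its build script prints `cargo:bar=baz`,
+    // then this build script is run with `DEP_FOO_BAR=baz` in its
+    // environment.)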
+    let lib_deps = {
+        dependencies.iter().filter_map(|unit| {
+            if unit.profile.run_custom_build {
+                Some((unit.pkg.manifest().links().unwrap().to_string(),
+                      unit.pkg.package_id().clone()))
+            } else {
+                None
+            }
+        }).collect::<Vec<_>>()
+    };
+    let pkg_name = unit.pkg.to_string();
+    let build_state = Arc::clone(&cx.build_state);
+    let id = unit.pkg.package_id().clone();
+    let (output_file, err_file) = {
+        let build_output_parent = build_output.parent().unwrap();
+        let output_file = build_output_parent.join("output");
+        let err_file = build_output_parent.join("stderr");
+        (output_file, err_file)
+    };
+    let all = (id.clone(), pkg_name.clone(), Arc::clone(&build_state),
+               output_file.clone());
+    let build_scripts = super::load_build_deps(cx, unit);
+    let kind = unit.kind;
+    let json_messages = cx.build_config.json_messages;
+
+    // Check to see if the build script has already run, and if it has keep
+    // track of whether it has told us about some explicit dependencies
+    let prev_output = BuildOutput::parse_file(&output_file, &pkg_name).ok();
+    let deps = BuildDeps::new(&output_file, prev_output.as_ref());
+    cx.build_explicit_deps.insert(*unit, deps);
+
+    fs::create_dir_all(&script_output)?;
+    fs::create_dir_all(&build_output)?;
+
+    let root_output = cx.target_root().to_path_buf();
+
+    // Prepare the unit of "dirty work" which will actually run the custom build
+    // command.
+    //
+    // Note that this has to do some extra work just before running the command
+    // to determine extra environment variables and such.
+    let dirty = Work::new(move |state| {
+        // Make sure that OUT_DIR exists.
+        //
+        // If we have an old build directory, then just move it into place,
+        // otherwise create it!
+        if fs::metadata(&build_output).is_err() {
+            fs::create_dir(&build_output).chain_err(|| {
+                internal("failed to create script output directory for \
+                          build command")
+            })?;
+        }
+
+        // For all our native lib dependencies, pick up their metadata to pass
+        // along to this custom build command. We're also careful to augment our
+        // dynamic library search path in case the build script depended on any
+        // native dynamic libraries.
+        {
+            let build_state = build_state.outputs.lock().unwrap();
+            for (name, id) in lib_deps {
+                let key = (id.clone(), kind);
+                let state = build_state.get(&key).ok_or_else(|| {
+                    internal(format!("failed to locate build state for env \
+                                      vars: {}/{:?}", id, kind))
+                })?;
+                let data = &state.metadata;
+                for &(ref key, ref value) in data.iter() {
+                    cmd.env(&format!("DEP_{}_{}", super::envify(&name),
+                                     super::envify(key)), value);
+                }
+            }
+            if let Some(build_scripts) = build_scripts {
+                super::add_plugin_deps(&mut cmd, &build_state,
+                                       &build_scripts,
+                                       &root_output)?;
+            }
+        }
+
+        // And now finally, run the build command itself!
+        state.running(&cmd);
+        let output = cmd.exec_with_streaming(
+            &mut |out_line| { state.stdout(out_line); Ok(()) },
+            &mut |err_line| { state.stderr(err_line); Ok(()) },
+            true,
+        ).map_err(|e| {
+            CargoError::from(
+                format!("failed to run custom build command for `{}`\n{}",
+                        pkg_name, e.description()))
+        })?;
+
+        // After the build command has finished running, we need to be sure to
+        // remember all of its output so we can later discover precisely what it
+        // was, even if we don't run the build command again (due to freshness).
+        //
+        // This is also the location where we provide feedback into the build
+        // state informing what variables were discovered via our script as
+        // well.
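+        //
+        // (Illustrative example, not in the original source: a line printed
+        // by the script such as `cargo:rustc-link-lib=static=foo` is parsed
+        // into `library_links` below, and `cargo:rerun-if-changed=build.rs`
+        // into `rerun_if_changed`.)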
+        paths::write(&output_file, &output.stdout)?;
+        paths::write(&err_file, &output.stderr)?;
+        let parsed_output = BuildOutput::parse(&output.stdout, &pkg_name)?;
+
+        if json_messages {
+            let library_paths = parsed_output.library_paths.iter().map(|l| {
+                l.display().to_string()
+            }).collect::<Vec<_>>();
+            machine_message::emit(&machine_message::BuildScript {
+                package_id: &id,
+                linked_libs: &parsed_output.library_links,
+                linked_paths: &library_paths,
+                cfgs: &parsed_output.cfgs,
+                env: &parsed_output.env,
+            });
+        }
+
+        build_state.insert(id, kind, parsed_output);
+        Ok(())
+    });
+
+    // Now that we've prepared our work-to-do, we need to prepare the fresh work
+    // itself to run when we actually end up just discarding what we calculated
+    // above.
+    let fresh = Work::new(move |_tx| {
+        let (id, pkg_name, build_state, output_file) = all;
+        let output = match prev_output {
+            Some(output) => output,
+            None => BuildOutput::parse_file(&output_file, &pkg_name)?,
+        };
+        build_state.insert(id, kind, output);
+        Ok(())
+    });
+
+    Ok((dirty, fresh))
+}
+
+impl BuildState {
+    pub fn new(config: &super::BuildConfig) -> BuildState {
+        let mut overrides = HashMap::new();
+        let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
+        let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
+        for ((name, output), kind) in i1.chain(i2) {
+            overrides.insert((name.clone(), kind), output.clone());
+        }
+        BuildState {
+            outputs: Mutex::new(HashMap::new()),
+            overrides: overrides,
+        }
+    }
+
+    fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
+        self.outputs.lock().unwrap().insert((id, kind), output);
+    }
+}
+
+impl BuildOutput {
+    pub fn parse_file(path: &Path, pkg_name: &str) -> CargoResult<BuildOutput> {
+        let contents = paths::read_bytes(path)?;
+        BuildOutput::parse(&contents, pkg_name)
+    }
+
+    // Parses the output of a script.
+    // The `pkg_name` is used for error messages.
+    pub fn parse(input: &[u8], pkg_name: &str) -> CargoResult<BuildOutput> {
+        let mut library_paths = Vec::new();
+        let mut library_links = Vec::new();
+        let mut cfgs = Vec::new();
+        let mut env = Vec::new();
+        let mut metadata = Vec::new();
+        let mut rerun_if_changed = Vec::new();
+        let mut rerun_if_env_changed = Vec::new();
+        let mut warnings = Vec::new();
+        let whence = format!("build script of `{}`", pkg_name);
+
+        for line in input.split(|b| *b == b'\n') {
+            let line = match str::from_utf8(line) {
+                Ok(line) => line.trim(),
+                Err(..) => continue,
+            };
+            let mut iter = line.splitn(2, ':');
+            if iter.next() != Some("cargo") {
+                // skip this line since it doesn't start with "cargo:"
+                continue;
+            }
+            let data = match iter.next() {
+                Some(val) => val,
+                None => continue
+            };
+
+            // getting the `key=value` part of the line
+            let mut iter = data.splitn(2, '=');
+            let key = iter.next();
+            let value = iter.next();
+            let (key, value) = match (key, value) {
+                (Some(a), Some(b)) => (a, b.trim_right()),
+                // line started with `cargo:` but didn't match `key=value`
+                _ => bail!("Wrong output in {}: `{}`", whence, line),
+            };
+
+            match key {
+                "rustc-flags" => {
+                    let (paths, links) =
+                        BuildOutput::parse_rustc_flags(value, &whence)?;
+                    library_links.extend(links.into_iter());
+                    library_paths.extend(paths.into_iter());
+                }
+                "rustc-link-lib" => library_links.push(value.to_string()),
+                "rustc-link-search" => library_paths.push(PathBuf::from(value)),
+                "rustc-cfg" => cfgs.push(value.to_string()),
+                "rustc-env" => env.push(BuildOutput::parse_rustc_env(value, &whence)?),
+                "warning" => warnings.push(value.to_string()),
+                "rerun-if-changed" => rerun_if_changed.push(value.to_string()),
+                "rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()),
+                _ => metadata.push((key.to_string(), value.to_string())),
+            }
+        }
+
+        Ok(BuildOutput {
+            library_paths: library_paths,
+            library_links: library_links,
+            cfgs: cfgs,
+            env: env,
+            metadata: metadata,
+            rerun_if_changed: rerun_if_changed,
+            rerun_if_env_changed: rerun_if_env_changed,
+            warnings: warnings,
+        })
+    }
+
+    pub fn parse_rustc_flags(value: &str, whence: &str)
+                             -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
+        let value = value.trim();
+        let mut flags_iter = value.split(|c: char| c.is_whitespace())
+                                  .filter(|w| w.chars().any(|c| !c.is_whitespace()));
+        let (mut library_paths, mut library_links) = (Vec::new(), Vec::new());
+        while let Some(flag) = flags_iter.next() {
+            if flag != "-l" && flag != "-L" {
+                bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
+                      whence, value)
+            }
+            let value = match flags_iter.next() {
+                Some(v) => v,
+                None => bail!("Flag in rustc-flags has no value in {}: `{}`",
+                              whence, value)
+            };
+            match flag {
+                "-l" => library_links.push(value.to_string()),
+                "-L" => library_paths.push(PathBuf::from(value)),
+
+                // was already checked above
+                _ => bail!("only -l and -L flags are allowed")
+            };
+        }
+        Ok((library_paths, library_links))
+    }
+
+    pub fn parse_rustc_env(value: &str, whence: &str)
+                           -> CargoResult<(String, String)> {
+        let mut iter = value.splitn(2, '=');
+        let name = iter.next();
+        let val = iter.next();
+        match (name, val) {
+            (Some(n), Some(v)) => Ok((n.to_owned(), v.to_owned())),
+            _ => bail!("Variable rustc-env has no value in {}: {}", whence, value),
+        }
+    }
+}
+
+impl BuildDeps {
+    pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps {
+        BuildDeps {
+            build_script_output: output_file.to_path_buf(),
+            rerun_if_changed: output.map(|p| &p.rerun_if_changed)
+                                    .cloned()
+                                    .unwrap_or_default(),
+            rerun_if_env_changed: output.map(|p| &p.rerun_if_env_changed)
+                                        .cloned()
+                                        .unwrap_or_default(),
+        }
+    }
+}
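For orientation, `BuildOutput::parse` above defines the entire `cargo:key=value` protocol this vintage of Cargo understands. A minimal build script exercising the recognized keys might look like the sketch below; the paths, names, and values are illustrative only and not part of this patch:

```rust
// build.rs -- every directive is one stdout line starting with `cargo:`.
// Keys that `parse` does not recognize land in `metadata`, and (for a
// package with `links = "foo"`) reach dependents' build scripts as
// DEP_FOO_* environment variables.
fn main() {
    println!("cargo:rustc-link-search=/opt/libfoo/lib"); // -> library_paths
    println!("cargo:rustc-link-lib=foo");                // -> library_links
    println!("cargo:rustc-cfg=has_foo");                 // -> cfgs
    println!("cargo:rustc-env=FOO_VERSION=1.2.3");       // -> env
    println!("cargo:warning=using bundled libfoo");      // -> warnings
    println!("cargo:root=/opt/libfoo");                  // -> metadata
}
```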
+/// Compute the `build_scripts` map in the `Context` which tracks what build
+/// scripts each package depends on.
+///
+/// The global `build_scripts` map lists for all (package, kind) tuples what set
+/// of packages' build script outputs must be considered. For example this lists
+/// all dependencies' `-L` flags which need to be propagated transitively.
+///
+/// The given set of targets to this function is the initial set of
+/// targets/profiles which are being built.
+pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
+                           units: &[Unit<'b>])
+                           -> CargoResult<()> {
+    let mut ret = HashMap::new();
+    for unit in units {
+        build(&mut ret, cx, unit)?;
+    }
+    cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
+        (k, Arc::new(v))
+    }));
+    return Ok(());
+
+    // Recursive function to build up the map we're constructing. This function
+    // memoizes all of its return values as it goes along.
+    fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
+                           cx: &mut Context<'b, 'cfg>,
+                           unit: &Unit<'b>)
+                           -> CargoResult<&'a BuildScripts> {
+        // Do a quick pre-flight check to see if we've already calculated the
+        // set of dependencies.
+        if out.contains_key(unit) {
+            return Ok(&out[unit])
+        }
+
+        {
+            let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind));
+            let build_state = &cx.build_state;
+            if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
+                let key = (unit.pkg.package_id().clone(), unit.kind);
+                cx.build_script_overridden.insert(key.clone());
+                build_state
+                    .outputs
+                    .lock()
+                    .unwrap()
+                    .insert(key, output.clone());
+            }
+        }
+
+        let mut ret = BuildScripts::default();
+
+        if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
+            add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
+        }
+
+        // We want to invoke the compiler deterministically to be cache-friendly
+        // to rustc invocation caching schemes, so be sure to generate the same
+        // set of build script dependency orderings via sorting the targets that
+        // come out of the `Context`.
+        let mut targets = cx.dep_targets(unit)?;
+        targets.sort_by_key(|u| u.pkg.package_id());
+
+        for unit in targets.iter() {
+            let dep_scripts = build(out, cx, unit)?;
+
+            if unit.target.for_host() {
+                ret.plugins.extend(dep_scripts.to_link.iter()
+                                              .map(|p| &p.0).cloned());
+            } else if unit.target.linkable() {
+                for &(ref pkg, kind) in dep_scripts.to_link.iter() {
+                    add_to_link(&mut ret, pkg, kind);
+                }
+            }
+        }
+
+        let prev = out.entry(*unit).or_insert(BuildScripts::default());
+        for (pkg, kind) in ret.to_link {
+            add_to_link(prev, &pkg, kind);
+        }
+        prev.plugins.extend(ret.plugins);
+        Ok(prev)
+    }
+
+    // When adding an entry to 'to_link' we only actually push it on if the
+    // script hasn't seen it yet (e.g. we don't push on duplicates).
+    fn add_to_link(scripts: &mut BuildScripts, pkg: &PackageId, kind: Kind) {
+        if scripts.seen_to_link.insert((pkg.clone(), kind)) {
+            scripts.to_link.push((pkg.clone(), kind));
+        }
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/fingerprint.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/fingerprint.rs
new file mode 100644
index 000000000..62a53a857
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/fingerprint.rs
@@ -0,0 +1,722 @@
+use std::env;
+use std::fs::{self, File, OpenOptions};
+use std::hash::{self, Hasher};
+use std::io::prelude::*;
+use std::io::{BufReader, SeekFrom};
+use std::path::{Path, PathBuf};
+use std::sync::{Arc, Mutex};
+
+use filetime::FileTime;
+use serde::ser::{self, Serialize};
+use serde::de::{self, Deserialize};
+use serde_json;
+
+use core::{Package, TargetKind};
+use util;
+use util::{Fresh, Dirty, Freshness, internal, profile};
+use util::errors::{CargoResult, CargoResultExt};
+use util::paths;
+
+use super::job::Work;
+use super::context::{Context, Unit, TargetFileType};
+use super::custom_build::BuildDeps;
+
+/// A tuple result of the `prepare_foo` functions in this module.
+///
+/// The first element of the triple is whether the target in question is
+/// currently fresh or not, and the second two elements are work to perform when
+/// the target is dirty or fresh, respectively.
+///
+/// Both units of work are always generated because a fresh package may still be
+/// rebuilt if some upstream dependency changes.
+pub type Preparation = (Freshness, Work, Work);
+
+/// Prepare the necessary work for the fingerprint for a specific target.
+///
+/// When dealing with fingerprints, cargo gets to choose what granularity
+/// "freshness" is considered at. One option is considering freshness at the
+/// package level. This means that if anything in a package changes, the entire
+/// package is rebuilt, unconditionally. This simplicity comes at a cost,
+/// however, in that test-only changes will cause libraries to be rebuilt, which
+/// is quite unfortunate!
+///
+/// The cost was deemed high enough that fingerprints are now calculated at the
+/// layer of a target rather than a package. Each target can then be kept track
+/// of separately and only rebuilt as necessary. This requires cargo to
+/// understand what the inputs are to a target, so we drive rustc with the
+/// --dep-info flag to learn about all input files to a unit of compilation.
+///
+/// This function will calculate the fingerprint for a target and prepare the
+/// work necessary to either write the fingerprint or copy over all fresh files
+/// from the old directories to their new locations.
+pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
+                                unit: &Unit<'a>) -> CargoResult<Preparation> {
+    let _p = profile::start(format!("fingerprint: {} / {}",
+                                    unit.pkg.package_id(), unit.target.name()));
+    let new = cx.fingerprint_dir(unit);
+    let loc = new.join(&filename(cx, unit));
+
+    debug!("fingerprint at: {}", loc.display());
+
+    let fingerprint = calculate(cx, unit)?;
+    let compare = compare_old_fingerprint(&loc, &*fingerprint);
+    log_compare(unit, &compare);
+
+    // If our comparison failed (e.g. we're going to trigger a rebuild of this
+    // crate), then we also ensure the source of the crate passes all
+    // verification checks before we build it.
+    //
+    // The `Source::verify` method is intended to allow sources to execute
+    // pre-build checks to ensure that the relevant source code is all
+    // up-to-date and as expected. This is currently used primarily for
+    // directory sources which will use this hook to perform an integrity check
+    // on all files in the source to ensure they haven't changed. If they have
+    // changed then an error is issued.
+    if compare.is_err() {
+        let source_id = unit.pkg.package_id().source_id();
+        let sources = cx.packages.sources();
+        let source = sources.get(source_id).ok_or_else(|| {
+            internal("missing package source")
+        })?;
+        source.verify(unit.pkg.package_id())?;
+    }
+
+    let root = cx.out_dir(unit);
+    let mut missing_outputs = false;
+    if unit.profile.doc {
+        missing_outputs = !root.join(unit.target.crate_name())
+                               .join("index.html").exists();
+    } else {
+        for &(ref src, ref link_dst, file_type) in cx.target_filenames(unit)?.iter() {
+            if file_type == TargetFileType::DebugInfo {
+                continue;
+            }
+            missing_outputs |= !src.exists();
+            if let Some(ref link_dst) = *link_dst {
+                missing_outputs |= !link_dst.exists();
+            }
+        }
+    }
+
+    let allow_failure = unit.profile.rustc_args.is_some();
+    let write_fingerprint = Work::new(move |_| {
+        match fingerprint.update_local() {
+            Ok(()) => {}
+            Err(..) if allow_failure => return Ok(()),
+            Err(e) => return Err(e)
+        }
+        write_fingerprint(&loc, &*fingerprint)
+    });
+
+    let fresh = compare.is_ok() && !missing_outputs;
+    Ok((if fresh {Fresh} else {Dirty}, write_fingerprint, Work::noop()))
+}
+
+/// A fingerprint can be considered to be a "short string" representing the
+/// state of a world for a package.
+///
+/// If a fingerprint ever changes, then the package itself needs to be
+/// recompiled. Inputs to the fingerprint include source code modifications,
+/// compiler flags, compiler version, etc. This structure is not simply a
+/// `String` due to the fact that some fingerprints cannot be calculated lazily.
+///
+/// Path sources, for example, use the mtime of the corresponding dep-info file
+/// as a fingerprint (all source files must be modified *before* this mtime).
+/// This dep-info file is not generated, however, until after the crate is
+/// compiled. As a result, this structure can be thought of as a fingerprint
+/// to-be. The actual value can be calculated via `hash()`, but the operation
+/// may fail as some files may not have been generated.
+///
+/// Note that dependencies are taken into account for fingerprints because rustc
+/// requires that whenever an upstream crate is recompiled that all downstream
+/// dependants are also recompiled. This is typically tracked through
+/// `DependencyQueue`, but it also needs to be retained here because Cargo can
+/// be interrupted while executing, losing the state of the `DependencyQueue`
+/// graph.
+#[derive(Serialize, Deserialize)]
+pub struct Fingerprint {
+    rustc: u64,
+    features: String,
+    target: u64,
+    profile: u64,
+    #[serde(serialize_with = "serialize_deps", deserialize_with = "deserialize_deps")]
+    deps: Vec<(String, Arc<Fingerprint>)>,
+    local: Vec<LocalFingerprint>,
+    #[serde(skip_serializing, skip_deserializing)]
+    memoized_hash: Mutex<Option<u64>>,
+    rustflags: Vec<String>,
+}
+
+fn serialize_deps<S>(deps: &[(String, Arc<Fingerprint>)], ser: S)
+                     -> Result<S::Ok, S::Error>
+    where S: ser::Serializer,
+{
+    deps.iter().map(|&(ref a, ref b)| {
+        (a, b.hash())
+    }).collect::<Vec<_>>().serialize(ser)
+}
+
+fn deserialize_deps<'de, D>(d: D) -> Result<Vec<(String, Arc<Fingerprint>)>, D::Error>
+    where D: de::Deserializer<'de>,
+{
+    let decoded = <Vec<(String, u64)>>::deserialize(d)?;
+    Ok(decoded.into_iter().map(|(name, hash)| {
+        (name, Arc::new(Fingerprint {
+            rustc: 0,
+            target: 0,
+            profile: 0,
+            local: vec![LocalFingerprint::Precalculated(String::new())],
+            features: String::new(),
+            deps: Vec::new(),
+            memoized_hash: Mutex::new(Some(hash)),
+            rustflags: Vec::new(),
+        }))
+    }).collect())
+}
+
+#[derive(Serialize, Deserialize, Hash)]
+enum LocalFingerprint {
+    Precalculated(String),
+    MtimeBased(MtimeSlot, PathBuf),
+    EnvBased(String, Option<String>),
+}
+
+struct MtimeSlot(Mutex<Option<FileTime>>);
+
+impl Fingerprint {
+    fn update_local(&self) -> CargoResult<()> {
+        let mut hash_busted = false;
+        for local in self.local.iter() {
+            match *local {
+                LocalFingerprint::MtimeBased(ref slot, ref path) => {
+                    let meta = fs::metadata(path)
+                        .chain_err(|| {
+                            internal(format!("failed to stat `{}`", path.display()))
+                        })?;
+                    let mtime = FileTime::from_last_modification_time(&meta);
+                    *slot.0.lock().unwrap() = Some(mtime);
+                }
+                LocalFingerprint::EnvBased(..) |
+                LocalFingerprint::Precalculated(..) => continue,
+            }
+            hash_busted = true;
+        }
+
+        if hash_busted {
+            *self.memoized_hash.lock().unwrap() = None;
+        }
+        Ok(())
+    }
+
+    fn hash(&self) -> u64 {
+        if let Some(s) = *self.memoized_hash.lock().unwrap() {
+            return s
+        }
+        let ret = util::hash_u64(self);
+        *self.memoized_hash.lock().unwrap() = Some(ret);
+        ret
+    }
+
+    fn compare(&self, old: &Fingerprint) -> CargoResult<()> {
+        if self.rustc != old.rustc {
+            bail!("rust compiler has changed")
+        }
+        if self.features != old.features {
+            bail!("features have changed: {} != {}", self.features, old.features)
+        }
+        if self.target != old.target {
+            bail!("target configuration has changed")
+        }
+        if self.profile != old.profile {
+            bail!("profile configuration has changed")
+        }
+        if self.rustflags != old.rustflags {
+            return Err(internal("RUSTFLAGS has changed"))
+        }
+        if self.local.len() != old.local.len() {
+            bail!("local lens changed");
+        }
+        for (new, old) in self.local.iter().zip(&old.local) {
+            match (new, old) {
+                (&LocalFingerprint::Precalculated(ref a),
+                 &LocalFingerprint::Precalculated(ref b)) => {
+                    if a != b {
+                        bail!("precalculated components have changed: {} != {}",
+                              a, b)
+                    }
+                }
+                (&LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap),
+                 &LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp)) => {
+                    let on_disk_mtime = on_disk_mtime.0.lock().unwrap();
+                    let previously_built_mtime = previously_built_mtime.0.lock().unwrap();
+
+                    let should_rebuild = match (*on_disk_mtime, *previously_built_mtime) {
+                        (None, None) => false,
+                        (Some(_), None) | (None, Some(_)) => true,
+                        (Some(on_disk), Some(previously_built)) => on_disk > previously_built,
+                    };
+
+                    if should_rebuild {
+                        bail!("mtime based components have changed: previously {:?} now {:?}, \
+                               paths are {:?} and {:?}",
+                              *previously_built_mtime, *on_disk_mtime, ap, bp)
+                    }
+                }
+                (&LocalFingerprint::EnvBased(ref akey, ref avalue),
+                 &LocalFingerprint::EnvBased(ref bkey, ref bvalue)) => {
+                    if *akey != *bkey {
+                        bail!("env vars changed: {} != {}", akey, bkey);
+                    }
+                    if *avalue != *bvalue {
+                        bail!("env var `{}` changed: previously {:?} now {:?}",
+                              akey, bvalue, avalue)
+                    }
+                }
+                _ => bail!("local fingerprint type has changed"),
+            }
+        }
+
+        if self.deps.len() != old.deps.len() {
+            bail!("number of dependencies has changed")
+        }
+        for (a, b) in self.deps.iter().zip(old.deps.iter()) {
+            if a.1.hash() != b.1.hash() {
+                bail!("new ({}) != old ({})", a.0, b.0)
+            }
+        }
+        Ok(())
+    }
+}
+
+impl hash::Hash for Fingerprint {
+    fn hash<H: Hasher>(&self, h: &mut H) {
+        let Fingerprint {
+            rustc,
+            ref features,
+            target,
+            profile,
+            ref deps,
+            ref local,
+            memoized_hash: _,
+            ref rustflags,
+        } = *self;
+        (rustc, features, target, profile, local, rustflags).hash(h);
+
+        h.write_usize(deps.len());
+        for &(ref name, ref fingerprint) in deps {
+            name.hash(h);
+            // use memoized dep hashes to avoid exponential blowup
+            h.write_u64(Fingerprint::hash(fingerprint));
+        }
+    }
+}
+
+impl hash::Hash for MtimeSlot {
+    fn hash<H: Hasher>(&self, h: &mut H) {
+        self.0.lock().unwrap().hash(h)
+    }
+}
+
+impl ser::Serialize for MtimeSlot {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+        where S: ser::Serializer,
+    {
+        self.0.lock().unwrap().map(|ft| {
+            (ft.seconds_relative_to_1970(), ft.nanoseconds())
+        }).serialize(s)
+    }
+}
+
+impl<'de> de::Deserialize<'de> for MtimeSlot {
+    fn deserialize<D>(d: D) -> Result<MtimeSlot, D::Error>
+        where D: de::Deserializer<'de>,
+    {
+        let kind: Option<(u64, u32)> = de::Deserialize::deserialize(d)?;
+        Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| {
+            FileTime::from_seconds_since_1970(s, n)
+        }))))
+    }
+}
+
+/// Calculates the fingerprint for a package/target pair.
+///
+/// This fingerprint is used by Cargo to learn about when information such as:
+///
+/// * A non-path package changes (changes version, changes revision, etc).
+/// * Any dependency changes
+/// * The compiler changes
+/// * The set of features a package is built with changes
+/// * The profile a target is compiled with changes (e.g. opt-level changes)
+///
+/// Information like file modification time is only calculated for path
+/// dependencies and is calculated in `calculate_target_fresh`.
+fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
+                       -> CargoResult<Arc<Fingerprint>> {
+    if let Some(s) = cx.fingerprints.get(unit) {
+        return Ok(Arc::clone(s))
+    }
+
+    // Next, recursively calculate the fingerprint for all of our dependencies.
+    //
+    // Skip the fingerprints of build scripts as they may not always be
+    // available and the dirtiness propagation for modification is tracked
+    // elsewhere. Also skip fingerprints of binaries because they don't actually
+    // induce a recompile, they're just dependencies in the sense that they need
+    // to be built.
+    let deps = cx.dep_targets(unit)?;
+    let deps = deps.iter().filter(|u| {
+        !u.target.is_custom_build() && !u.target.is_bin()
+    }).map(|unit| {
+        calculate(cx, unit).map(|fingerprint| {
+            (unit.pkg.package_id().to_string(), fingerprint)
+        })
+    }).collect::<CargoResult<Vec<_>>>()?;
+
+    // And finally, calculate what our own local fingerprint is
+    let local = if use_dep_info(unit) {
+        let dep_info = dep_info_loc(cx, unit);
+        let mtime = dep_info_mtime_if_fresh(&dep_info)?;
+        LocalFingerprint::MtimeBased(MtimeSlot(Mutex::new(mtime)), dep_info)
+    } else {
+        let fingerprint = pkg_fingerprint(cx, unit.pkg)?;
+        LocalFingerprint::Precalculated(fingerprint)
+    };
+    let mut deps = deps;
+    deps.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b));
+    let extra_flags = if unit.profile.doc {
+        cx.rustdocflags_args(unit)?
+    } else {
+        cx.rustflags_args(unit)?
+    };
+    let fingerprint = Arc::new(Fingerprint {
+        rustc: util::hash_u64(&cx.config.rustc()?.verbose_version),
+        target: util::hash_u64(&unit.target),
+        profile: util::hash_u64(&unit.profile),
+        features: format!("{:?}", cx.resolve.features_sorted(unit.pkg.package_id())),
+        deps: deps,
+        local: vec![local],
+        memoized_hash: Mutex::new(None),
+        rustflags: extra_flags,
+    });
+    cx.fingerprints.insert(*unit, Arc::clone(&fingerprint));
+    Ok(fingerprint)
+}
+
+// We want to use the mtime for files if we're a path source, but if we're a
+// git/registry source, then the mtime of files may fluctuate, but they won't
+// change so long as the source itself remains constant (which is the
+// responsibility of the source)
+fn use_dep_info(unit: &Unit) -> bool {
+    let path = unit.pkg.summary().source_id().is_path();
+    !unit.profile.doc && path
+}
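The mtime comparison that the `MtimeBased` fingerprints above rest on can be reproduced with the `filetime` crate alone. A minimal standalone sketch, with placeholder file names (not taken from this patch):

```rust
use std::fs;
use filetime::FileTime;

// Returns true if `input` was modified after `output`, i.e. `output` is stale.
fn is_stale(output: &str, input: &str) -> std::io::Result<bool> {
    let out_mtime = FileTime::from_last_modification_time(&fs::metadata(output)?);
    let in_mtime = FileTime::from_last_modification_time(&fs::metadata(input)?);
    Ok(in_mtime > out_mtime)
}

fn main() -> std::io::Result<()> {
    println!("stale: {}", is_stale("target/out.bin", "src/main.rs")?);
    Ok(())
}
```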
+/// Prepare the necessary work for the fingerprint of a build command.
+///
+/// Build commands are located on packages, not on targets. Additionally, we
+/// don't have --dep-info to drive calculation of the fingerprint of a build
+/// command. This brings up an interesting predicament which gives us a few
+/// options to figure out whether a build command is dirty or not:
+///
+/// 1. A build command is dirty if *any* file in a package changes. In theory
+///    all files are candidates for being used by the build command.
+/// 2. A build command is dirty if any file in a *specific directory* changes.
+///    This may lose information as it may require files outside of the specific
+///    directory.
+/// 3. A build command must itself provide a dep-info-like file stating how it
+///    should be considered dirty or not.
+///
+/// The currently implemented solution is option (1), although it is planned to
+/// migrate to option (2) in the near future.
+pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
+                                   -> CargoResult<Preparation> {
+    let _p = profile::start(format!("fingerprint build cmd: {}",
+                                    unit.pkg.package_id()));
+    let new = cx.fingerprint_dir(unit);
+    let loc = new.join("build");
+
+    debug!("fingerprint at: {}", loc.display());
+
+    let (local, output_path) = build_script_local_fingerprints(cx, unit)?;
+    let mut fingerprint = Fingerprint {
+        rustc: 0,
+        target: 0,
+        profile: 0,
+        features: String::new(),
+        deps: Vec::new(),
+        local: local,
+        memoized_hash: Mutex::new(None),
+        rustflags: Vec::new(),
+    };
+    let compare = compare_old_fingerprint(&loc, &fingerprint);
+    log_compare(unit, &compare);
+
+    // When we write out the fingerprint, we may want to actually change the
+    // kind of fingerprint being recorded. When we started out, the previous
+    // run of the build script (or the fact that it had never run before) may
+    // have indicated that we should use the `Precalculated` variant with the
+    // `pkg_fingerprint`. If the build script then prints `rerun-if-changed`,
+    // however, we need to record what's necessary for that fingerprint.
+    //
+    // Hence, if there were some `rerun-if-changed` directives, forcibly change
+    // the kind of fingerprint by reinterpreting the dependencies output by the
+    // build script.
+    let state = Arc::clone(&cx.build_state);
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+    let root = unit.pkg.root().to_path_buf();
+    let write_fingerprint = Work::new(move |_| {
+        if let Some(output_path) = output_path {
+            let outputs = state.outputs.lock().unwrap();
+            let outputs = &outputs[&key];
+            if !outputs.rerun_if_changed.is_empty() ||
+               !outputs.rerun_if_env_changed.is_empty() {
+                let deps = BuildDeps::new(&output_path, Some(outputs));
+                fingerprint.local = local_fingerprints_deps(&deps, &root);
+                fingerprint.update_local()?;
+            }
+        }
+        write_fingerprint(&loc, &fingerprint)
+    });
+
+    Ok((if compare.is_ok() {Fresh} else {Dirty}, write_fingerprint, Work::noop()))
+}
+
+fn build_script_local_fingerprints<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
+                                             unit: &Unit<'a>)
+    -> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)>
+{
+    let state = cx.build_state.outputs.lock().unwrap();
+    // First up, if this build script is entirely overridden, then we just
+    // return the hash of what we overrode it with.
+    //
+    // Note that the `None` here means that we don't want to update the local
+    // fingerprint afterwards because this is all just overridden.
+    if let Some(output) = state.get(&(unit.pkg.package_id().clone(), unit.kind)) {
+        debug!("override local fingerprints deps");
+        let s = format!("overridden build state with hash: {}",
+                        util::hash_u64(output));
+        return Ok((vec![LocalFingerprint::Precalculated(s)], None))
+    }
+
+    // Next up we look at the previously listed dependencies for the build
+    // script. If there are none then we're in the "old mode" where we just
+    // assume that we're changed if anything in the package changed. The
+    // `Some` here though means that we want to update our local fingerprints
+    // after we're done as running this build script may have created more
+    // dependencies.
+    let deps = &cx.build_explicit_deps[unit];
+    let output = deps.build_script_output.clone();
+    if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
+        debug!("old local fingerprints deps");
+        let s = pkg_fingerprint(cx, unit.pkg)?;
+        return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)))
+    }
+
+    // Ok so now we're in "new mode" where we can have files listed as
+    // dependencies as well as env vars listed as dependencies. Process them all
+    // here.
+    Ok((local_fingerprints_deps(deps, unit.pkg.root()), Some(output)))
+}
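The "new mode" above is driven entirely by what the build script prints. A minimal sketch of a script opting in, consumed by `local_fingerprints_deps` below (the file and variable names are hypothetical):

```rust
// build.rs -- once either directive is printed, cargo re-runs the script
// only when the named file's mtime or the named env var changes, instead
// of whenever anything in the package changes.
fn main() {
    println!("cargo:rerun-if-changed=src/schema.sql");
    println!("cargo:rerun-if-env-changed=DATABASE_URL");
}
```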
+fn local_fingerprints_deps(deps: &BuildDeps, root: &Path) -> Vec<LocalFingerprint> {
+    debug!("new local fingerprints deps");
+    let mut local = Vec::new();
+    if !deps.rerun_if_changed.is_empty() {
+        let output = &deps.build_script_output;
+        let deps = deps.rerun_if_changed.iter().map(|p| root.join(p));
+        let mtime = mtime_if_fresh(output, deps);
+        let mtime = MtimeSlot(Mutex::new(mtime));
+        local.push(LocalFingerprint::MtimeBased(mtime, output.clone()));
+    }
+
+    for var in deps.rerun_if_env_changed.iter() {
+        let val = env::var(var).ok();
+        local.push(LocalFingerprint::EnvBased(var.clone(), val));
+    }
+
+    local
+}
+
+fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
+    let hash = fingerprint.hash();
+    debug!("write fingerprint: {}", loc.display());
+    paths::write(loc, util::to_hex(hash).as_bytes())?;
+    paths::write(&loc.with_extension("json"),
+                 &serde_json::to_vec(&fingerprint).unwrap())?;
+    Ok(())
+}
+
+/// Prepare for work when a package starts to build
+pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<()> {
+    let new1 = cx.fingerprint_dir(unit);
+
+    if fs::metadata(&new1).is_err() {
+        fs::create_dir(&new1)?;
+    }
+
+    Ok(())
+}
+
+pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
+    cx.fingerprint_dir(unit).join(&format!("dep-{}", filename(cx, unit)))
+}
+
+fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint)
+                           -> CargoResult<()> {
+    let old_fingerprint_short = paths::read(loc)?;
+    let new_hash = new_fingerprint.hash();
+
+    if util::to_hex(new_hash) == old_fingerprint_short {
+        return Ok(())
+    }
+
+    let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
+    let old_fingerprint = serde_json::from_str(&old_fingerprint_json)
+        .chain_err(|| internal("failed to deserialize json"))?;
+    new_fingerprint.compare(&old_fingerprint)
+}
+
+fn log_compare(unit: &Unit, compare: &CargoResult<()>) {
+    let ce = match *compare {
+        Ok(..) => return,
+        Err(ref e) => e,
+    };
+    info!("fingerprint error for {}: {}", unit.pkg, ce);
+
+    for cause in ce.iter() {
+        info!("  cause: {}", cause);
+    }
+}
+
+// Parse the dep-info into a list of paths
+pub fn parse_dep_info(dep_info: &Path) -> CargoResult<Option<Vec<PathBuf>>> {
+    macro_rules! fs_try {
+        ($e:expr) => (match $e { Ok(e) => e, Err(..) => return Ok(None) })
+    }
+    let mut f = BufReader::new(fs_try!(File::open(dep_info)));
+    // see comments in append_current_dir for where this cwd is manifested from.
+    let mut cwd = Vec::new();
+    if fs_try!(f.read_until(0, &mut cwd)) == 0 {
+        return Ok(None)
+    }
+    let cwd = util::bytes2path(&cwd[..cwd.len()-1])?;
+    let line = match f.lines().next() {
+        Some(Ok(line)) => line,
+        _ => return Ok(None),
+    };
+    let pos = line.find(": ").ok_or_else(|| {
+        internal(format!("dep-info not in an understood format: {}",
+                         dep_info.display()))
+    })?;
+    let deps = &line[pos + 2..];
+
+    let mut paths = Vec::new();
+    let mut deps = deps.split(' ').map(|s| s.trim()).filter(|s| !s.is_empty());
+    while let Some(s) = deps.next() {
+        let mut file = s.to_string();
+        while file.ends_with('\\') {
+            file.pop();
+            file.push(' ');
+            file.push_str(deps.next().ok_or_else(|| {
+                internal("malformed dep-info format, trailing \\".to_string())
+            })?);
+        }
+        paths.push(cwd.join(&file));
+    }
+    Ok(Some(paths))
+}
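`parse_dep_info` above reads the makefile-style dep-info that rustc emits, after `append_current_dir` (defined further down) has prepended the invocation's working directory as a NUL-terminated prefix. A rough standalone illustration of that on-disk shape, with invented paths:

```rust
fn main() {
    // Conceptual contents of a cargo-massaged dep-info file: a NUL-terminated
    // cwd, then a `target: dep1 dep2 ...` line with `\`-escaped spaces.
    let raw = b"/home/user/project\0debug/libfoo.rlib: src/lib.rs src/my\\ file.rs\n";
    let nul = raw.iter().position(|&b| b == 0).unwrap();
    let cwd = std::str::from_utf8(&raw[..nul]).unwrap();
    let line = std::str::from_utf8(&raw[nul + 1..]).unwrap();
    let deps = &line[line.find(": ").unwrap() + 2..];
    println!("cwd = {}, deps = {}", cwd, deps.trim_end());
}
```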
+fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult<Option<FileTime>> {
+    if let Some(paths) = parse_dep_info(dep_info)? {
+        Ok(mtime_if_fresh(dep_info, paths.iter()))
+    } else {
+        Ok(None)
+    }
+}
+
+fn pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {
+    let source_id = pkg.package_id().source_id();
+    let sources = cx.packages.sources();
+
+    let source = sources.get(source_id).ok_or_else(|| {
+        internal("missing package source")
+    })?;
+    source.fingerprint(pkg)
+}
+
+fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
+    where I: IntoIterator,
+          I::Item: AsRef<Path>,
+{
+    let meta = match fs::metadata(output) {
+        Ok(meta) => meta,
+        Err(..) => return None,
+    };
+    let mtime = FileTime::from_last_modification_time(&meta);
+
+    let any_stale = paths.into_iter().any(|path| {
+        let path = path.as_ref();
+        let meta = match fs::metadata(path) {
+            Ok(meta) => meta,
+            Err(..) => {
+                info!("stale: {} -- missing", path.display());
+                return true
+            }
+        };
+        let mtime2 = FileTime::from_last_modification_time(&meta);
+        if mtime2 > mtime {
+            info!("stale: {} -- {} vs {}", path.display(), mtime2, mtime);
+            true
+        } else {
+            false
+        }
+    });
+
+    if any_stale {
+        None
+    } else {
+        Some(mtime)
+    }
+}
+
+fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
+    // file_stem includes metadata hash. Thus we have a different
+    // fingerprint for every metadata hash version. This works because
+    // even if the package is fresh, we'll still link the fresh target
+    let file_stem = cx.file_stem(unit);
+    let kind = match *unit.target.kind() {
+        TargetKind::Lib(..) => "lib",
+        TargetKind::Bin => "bin",
+        TargetKind::Test => "integration-test",
+        TargetKind::ExampleBin |
+        TargetKind::ExampleLib(..) => "example",
+        TargetKind::Bench => "bench",
+        TargetKind::CustomBuild => "build-script",
+    };
+    let flavor = if unit.profile.test {
+        "test-"
+    } else if unit.profile.doc {
+        "doc-"
+    } else {
+        ""
+    };
+    format!("{}{}-{}", flavor, kind, file_stem)
+}
+
+// The dep-info files emitted by the compiler all have their listed paths
+// relative to whatever the current directory was at the time that the compiler
+// was invoked. As the current directory may change over time, we need to record
+// what that directory was at the beginning of the file so we can know about it
+// next time.
+pub fn append_current_dir(path: &Path, cwd: &Path) -> CargoResult<()> {
+    debug!("appending {} <- {}", path.display(), cwd.display());
+    let mut f = OpenOptions::new().read(true).write(true).open(path)?;
+    let mut contents = Vec::new();
+    f.read_to_end(&mut contents)?;
+    f.seek(SeekFrom::Start(0))?;
+    f.write_all(util::path2bytes(cwd)?)?;
+    f.write_all(&[0])?;
+    f.write_all(&contents)?;
+    Ok(())
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/job.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/job.rs
new file mode 100644
index 000000000..219a6d437
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/job.rs
@@ -0,0 +1,67 @@
+use std::fmt;
+
+use util::{CargoResult, Fresh, Dirty, Freshness};
+use super::job_queue::JobState;
+
+pub struct Job { dirty: Work, fresh: Work }
+
+/// Each proc should send its description before starting.
+/// It should send either once or close immediately.
+pub struct Work {
+    inner: Box<for<'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
+}
+
+trait FnBox<A, R> {
+    fn call_box(self: Box<Self>, a: A) -> R;
+}
+
+impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
+    fn call_box(self: Box<F>, a: A) -> R {
+        (*self)(a)
+    }
+}
+
+impl Work {
+    pub fn new<F>(f: F) -> Work
+        where F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static
+    {
+        Work { inner: Box::new(f) }
+    }
+
+    pub fn noop() -> Work {
+        Work::new(|_| Ok(()))
+    }
+
+    pub fn call(self, tx: &JobState) -> CargoResult<()> {
+        self.inner.call_box(tx)
+    }
+
+    pub fn then(self, next: Work) -> Work {
+        Work::new(move |state| {
+            self.call(state)?;
+            next.call(state)
+        })
+    }
+}
+
+impl Job {
+    /// Create a new job representing a unit of work.
+    pub fn new(dirty: Work, fresh: Work) -> Job {
+        Job { dirty: dirty, fresh: fresh }
+    }
+
+    /// Consumes this job by running it, returning the result of the
+    /// computation.
+    pub fn run(self, fresh: Freshness, state: &JobState) -> CargoResult<()> {
+        match fresh {
+            Fresh => self.fresh.call(state),
+            Dirty => self.dirty.call(state),
+        }
+    }
+}
+
+impl fmt::Debug for Job {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "Job {{ ... }}")
+    }
+}
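`Work` above boxes a `FnOnce`, which the Rust of this era could not invoke through a `Box` directly; the private `FnBox` trait with a `self: Box<Self>` receiver is the standard workaround. A self-contained sketch of the same trick (runs on current Rust as well):

```rust
// Calling a boxed FnOnce via a `self: Box<Self>` method; this mirrors
// the pattern `Work::call` uses above.
trait FnBox<A, R> {
    fn call_box(self: Box<Self>, a: A) -> R;
}

impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
    fn call_box(self: Box<F>, a: A) -> R { (*self)(a) }
}

fn main() {
    let job: Box<dyn FnBox<i32, i32> + Send> = Box::new(|x| x + 1);
    assert_eq!(job.call_box(41), 42);
}
```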
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/job_queue.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/job_queue.rs
new file mode 100644
index 000000000..5bfc5d458
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/job_queue.rs
@@ -0,0 +1,426 @@
+use std::collections::HashSet;
+use std::collections::hash_map::HashMap;
+use std::fmt;
+use std::io;
+use std::mem;
+use std::sync::mpsc::{channel, Sender, Receiver};
+
+use crossbeam::{self, Scope};
+use jobserver::{Acquired, HelperThread};
+
+use core::{PackageId, Target, Profile};
+use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
+use util::{CargoResult, ProcessBuilder, profile, internal, CargoResultExt};
+use {handle_error};
+
+use super::{Context, Kind, Unit};
+use super::job::Job;
+
+/// A management structure of the entire dependency graph to compile.
+///
+/// This structure is backed by the `DependencyQueue` type and manages the
+/// actual compilation step of each package. Packages enqueue units of work and
+/// then later on the entire graph is processed and compiled.
+pub struct JobQueue<'a> {
+    queue: DependencyQueue<Key<'a>, Vec<(Job, Freshness)>>,
+    tx: Sender<Message<'a>>,
+    rx: Receiver<Message<'a>>,
+    active: usize,
+    pending: HashMap<Key<'a>, PendingBuild>,
+    compiled: HashSet<&'a PackageId>,
+    documented: HashSet<&'a PackageId>,
+    counts: HashMap<&'a PackageId, usize>,
+    is_release: bool,
+}
+
+/// A helper structure for metadata about the state of a building package.
+struct PendingBuild {
+    /// Number of jobs currently active
+    amt: usize,
+    /// Current freshness state of this package. Any dirty target within a
+    /// package will cause the entire package to become dirty.
+    fresh: Freshness,
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
+struct Key<'a> {
+    pkg: &'a PackageId,
+    target: &'a Target,
+    profile: &'a Profile,
+    kind: Kind,
+}
+
+pub struct JobState<'a> {
+    tx: Sender<Message<'a>>,
+}
+
+enum Message<'a> {
+    Run(String),
+    Stdout(String),
+    Stderr(String),
+    Token(io::Result<Acquired>),
+    Finish(Key<'a>, CargoResult<()>),
+}
+
+impl<'a> JobState<'a> {
+    pub fn running(&self, cmd: &ProcessBuilder) {
+        let _ = self.tx.send(Message::Run(cmd.to_string()));
+    }
+
+    pub fn stdout(&self, out: &str) {
+        let _ = self.tx.send(Message::Stdout(out.to_string()));
+    }
+
+    pub fn stderr(&self, err: &str) {
+        let _ = self.tx.send(Message::Stderr(err.to_string()));
+    }
+}
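The `Token` messages above come from a GNU-make-style jobserver. A minimal sketch of the acquire/release cycle using the `jobserver` crate, which the queue below drives through a helper thread (the job limit here is arbitrary):

```rust
use jobserver::Client;

fn main() -> std::io::Result<()> {
    // One client per build; every parallel unit of work needs a token.
    let client = Client::new(4)?;   // assume a limit of 4 parallel jobs
    let token = client.acquire()?;  // blocks until a token is available
    // ... run one unit of work while holding `token` ...
    drop(token);                    // dropping the token releases it
    Ok(())
}
```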
+impl<'a> JobQueue<'a> {
+    pub fn new<'cfg>(cx: &Context<'a, 'cfg>) -> JobQueue<'a> {
+        let (tx, rx) = channel();
+        JobQueue {
+            queue: DependencyQueue::new(),
+            tx: tx,
+            rx: rx,
+            active: 0,
+            pending: HashMap::new(),
+            compiled: HashSet::new(),
+            documented: HashSet::new(),
+            counts: HashMap::new(),
+            is_release: cx.build_config.release,
+        }
+    }
+
+    pub fn enqueue<'cfg>(&mut self,
+                         cx: &Context<'a, 'cfg>,
+                         unit: &Unit<'a>,
+                         job: Job,
+                         fresh: Freshness) -> CargoResult<()> {
+        let key = Key::new(unit);
+        let deps = key.dependencies(cx)?;
+        self.queue.queue(Fresh, key, Vec::new(), &deps).push((job, fresh));
+        *self.counts.entry(key.pkg).or_insert(0) += 1;
+        Ok(())
+    }
+
+    /// Execute all jobs necessary to build the dependency graph.
+    ///
+    /// This function will spawn off `config.jobs()` workers to build all of the
+    /// necessary dependencies, in order. Freshness is propagated as far as
+    /// possible along each dependency chain.
+    pub fn execute(&mut self, cx: &mut Context) -> CargoResult<()> {
+        let _p = profile::start("executing the job graph");
+
+        // We need to give a handle to the send half of our message queue to the
+        // jobserver helper thread. Unfortunately though we need the handle to be
+        // `'static` as that's typically what's required when spawning a
+        // thread!
+        //
+        // To work around this we transmute the `Sender` to a static lifetime.
+        // We're only sending "longer living" messages and we should also
+        // destroy all references to the channel before this function exits as
+        // the destructor for the `helper` object will ensure the associated
+        // thread is no longer running.
+        //
+        // As a result, this `transmute` to a longer lifetime should be safe in
+        // practice.
+        let tx = self.tx.clone();
+        let tx = unsafe {
+            mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx)
+        };
+        let helper = cx.jobserver.clone().into_helper_thread(move |token| {
+            drop(tx.send(Message::Token(token)));
+        }).chain_err(|| {
+            "failed to create helper thread for jobserver management"
+        })?;
+
+        crossbeam::scope(|scope| {
+            self.drain_the_queue(cx, scope, &helper)
+        })
+    }
+
+    fn drain_the_queue(&mut self,
+                       cx: &mut Context,
+                       scope: &Scope<'a>,
+                       jobserver_helper: &HelperThread)
+                       -> CargoResult<()> {
+        use std::time::Instant;
+
+        let mut tokens = Vec::new();
+        let mut queue = Vec::new();
+        trace!("queue: {:#?}", self.queue);
+
+        // Iteratively execute the entire dependency graph. Each turn of the
+        // loop starts out by scheduling as much work as possible (up to the
+        // maximum number of parallel jobs we have tokens for). A local queue
+        // is maintained separately from the main dependency queue as one
+        // dequeue may actually dequeue quite a bit of work (e.g. 10 binaries
+        // in one project).
+        //
+        // After a job has finished we update our internal state if it was
+        // successful and otherwise wait for pending work to finish if it failed
+        // and then immediately return.
+        let mut error = None;
+        let start_time = Instant::now();
+        loop {
+            // Dequeue as much work as we can, learning about everything
+            // possible that can run. Note that this is also the point where we
+            // start requesting job tokens. Each job after the first needs to
+            // request a token.
+            while let Some((fresh, key, jobs)) = self.queue.dequeue() {
+                let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| {
+                    f.combine(fresh)
+                });
+                self.pending.insert(key, PendingBuild {
+                    amt: jobs.len(),
+                    fresh: total_fresh,
+                });
+                for (job, f) in jobs {
+                    queue.push((key, job, f.combine(fresh)));
+                    if self.active + queue.len() > 1 {
+                        jobserver_helper.request_token();
+                    }
+                }
+            }
+
+            // Now that we've learned of all possible work that we can execute
+            // try to spawn it so long as we've got a jobserver token which says
+            // we're able to perform some parallel work.
+            while error.is_none() && self.active < tokens.len() + 1 && !queue.is_empty() {
+                let (key, job, fresh) = queue.remove(0);
+                self.run(key, fresh, job, cx.config, scope)?;
+            }
+
+            // If after all that we're not actually running anything then we're
+            // done!
+            if self.active == 0 {
+                break
+            }
+
+            // And finally, before we block waiting for the next event, drop any
+            // excess tokens we may have accidentally acquired. Due to how our
+            // jobserver interface is architected we may acquire a token that we
+            // don't actually use, and if this happens just relinquish it back
+            // to the jobserver itself.
+            tokens.truncate(self.active - 1);
+
+            match self.rx.recv().unwrap() {
+                Message::Run(cmd) => {
+                    cx.config.shell().verbose(|c| c.status("Running", &cmd))?;
+                }
+                Message::Stdout(out) => {
+                    if cx.config.extra_verbose() {
+                        println!("{}", out);
+                    }
+                }
+                Message::Stderr(err) => {
+                    if cx.config.extra_verbose() {
+                        writeln!(cx.config.shell().err(), "{}", err)?;
+                    }
+                }
+                Message::Finish(key, result) => {
+                    info!("end: {:?}", key);
+                    self.active -= 1;
+                    if self.active > 0 {
+                        assert!(!tokens.is_empty());
+                        drop(tokens.pop());
+                    }
+                    match result {
+                        Ok(()) => self.finish(key, cx)?,
+                        Err(e) => {
+                            let msg = "The following warnings were emitted during compilation:";
+                            self.emit_warnings(Some(msg), key, cx)?;
+
+                            if self.active > 0 {
+                                error = Some("build failed".into());
+                                handle_error(e, &mut *cx.config.shell());
+                                cx.config.shell().warn(
+                                    "build failed, waiting for other \
+                                     jobs to finish...")?;
+                            } else {
+                                error = Some(e);
+                            }
+                        }
+                    }
+                }
+                Message::Token(acquired_token) => {
+                    tokens.push(acquired_token.chain_err(|| {
+                        "failed to acquire jobserver token"
+                    })?);
+                }
+            }
+        }
+
+        let build_type = if self.is_release { "release" } else { "dev" };
+        let profile = cx.lib_profile();
+        let mut opt_type = String::from(if profile.opt_level == "0" { "unoptimized" }
+                                        else { "optimized" });
+        if profile.debuginfo.is_some() {
+            opt_type += " + debuginfo";
+        }
+        let duration = start_time.elapsed();
+        let time_elapsed = format!("{}.{1:.2} secs",
+                                   duration.as_secs(),
+                                   duration.subsec_nanos() / 10_000_000);
+        if self.queue.is_empty() {
+            let message = format!("{} [{}] target(s) in {}",
+                                  build_type,
+                                  opt_type,
+                                  time_elapsed);
+            cx.config.shell().status("Finished", message)?;
+            Ok(())
+        } else if let Some(e) = error {
+            Err(e)
+        } else {
+            debug!("queue: {:#?}", self.queue);
+            Err(internal("finished with jobs still left in the queue"))
+        }
+    }
+    /// Executes a job in the `scope` given, pushing the spawned thread's
+    /// handle onto `threads`.
+    fn run(&mut self,
+           key: Key<'a>,
+           fresh: Freshness,
+           job: Job,
+           config: &Config,
+           scope: &Scope<'a>) -> CargoResult<()> {
+        info!("start: {:?}", key);
+
+        self.active += 1;
+        *self.counts.get_mut(key.pkg).unwrap() -= 1;
+
+        let my_tx = self.tx.clone();
+        let doit = move || {
+            let res = job.run(fresh, &JobState {
+                tx: my_tx.clone(),
+            });
+            my_tx.send(Message::Finish(key, res)).unwrap();
+        };
+        match fresh {
+            Freshness::Fresh => doit(),
+            Freshness::Dirty => { scope.spawn(doit); }
+        }
+
+        // Print out some nice progress information
+        self.note_working_on(config, &key, fresh)?;
+
+        Ok(())
+    }
+
+    fn emit_warnings(&self, msg: Option<&str>, key: Key<'a>, cx: &mut Context) -> CargoResult<()> {
+        let output = cx.build_state.outputs.lock().unwrap();
+        if let Some(output) = output.get(&(key.pkg.clone(), key.kind)) {
+            if let Some(msg) = msg {
+                if !output.warnings.is_empty() {
+                    writeln!(cx.config.shell().err(), "{}\n", msg)?;
+                }
+            }
+
+            for warning in output.warnings.iter() {
+                cx.config.shell().warn(warning)?;
+            }
+
+            if !output.warnings.is_empty() && msg.is_some() {
+                // Output an empty line.
+                writeln!(cx.config.shell().err(), "")?;
+            }
+        }
+
+        Ok(())
+    }
+
+    fn finish(&mut self, key: Key<'a>, cx: &mut Context) -> CargoResult<()> {
+        if key.profile.run_custom_build && cx.show_warnings(key.pkg) {
+            self.emit_warnings(None, key, cx)?;
+        }
+
+        let state = self.pending.get_mut(&key).unwrap();
+        state.amt -= 1;
+        if state.amt == 0 {
+            self.queue.finish(&key, state.fresh);
+        }
+        Ok(())
+    }
+
+    // This isn't super trivial because we don't want to print loads and
+    // loads of information to the console, but we also want to produce a
+    // faithful representation of what's happening. This is somewhat nuanced
+    // as a package can start compiling *very* early on because of custom
+    // build commands and such.
+    //
+    // In general, we try to print "Compiling" for the first nontrivial task
+    // run for a package, regardless of when that is. We then don't print
+    // out any more information for a package after we've printed it once.
+    fn note_working_on(&mut self,
+                       config: &Config,
+                       key: &Key<'a>,
+                       fresh: Freshness) -> CargoResult<()> {
+        if (self.compiled.contains(key.pkg) && !key.profile.doc) ||
+           (self.documented.contains(key.pkg) && key.profile.doc) {
+            return Ok(())
+        }
+
+        match fresh {
+            // Any dirty stage which runs at least one command gets printed as
+            // being a compiled package
+            Dirty => {
+                if key.profile.doc {
+                    if !key.profile.test {
+                        self.documented.insert(key.pkg);
+                        config.shell().status("Documenting", key.pkg)?;
+                    }
+                } else {
+                    self.compiled.insert(key.pkg);
+                    config.shell().status("Compiling", key.pkg)?;
+                }
+            }
+            Fresh if self.counts[key.pkg] == 0 => {
+                self.compiled.insert(key.pkg);
+                config.shell().verbose(|c| c.status("Fresh", key.pkg))?;
+            }
+            Fresh => {}
+        }
+        Ok(())
+    }
+}
+
+impl<'a> Key<'a> {
+    fn new(unit: &Unit<'a>) -> Key<'a> {
+        Key {
+            pkg: unit.pkg.package_id(),
+            target: unit.target,
+            profile: unit.profile,
+            kind: unit.kind,
+        }
+    }
+
+    fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>)
+                          -> CargoResult<Vec<Key<'a>>> {
+        let unit = Unit {
+            pkg: cx.get_package(self.pkg)?,
+            target: self.target,
+            profile: self.profile,
+            kind: self.kind,
+        };
+        let targets = cx.dep_targets(&unit)?;
+        Ok(targets.iter().filter_map(|unit| {
+            // Binaries aren't actually needed to *compile* tests, just to run
+            // them, so we don't include this dependency edge in the job graph.
+            if self.target.is_test() && unit.target.is_bin() {
+                None
+            } else {
+                Some(Key::new(unit))
+            }
+        }).collect())
+    }
+}
+
+impl<'a> fmt::Debug for Key<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{} => {}/{} => {:?}", self.pkg, self.target, self.profile,
+               self.kind)
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/layout.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/layout.rs
new file mode 100644
index 000000000..464a68945
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/layout.rs
@@ -0,0 +1,191 @@
+//! Management of the directory layout of a build
+//!
+//! The directory layout is a little tricky at times, hence a separate file to
+//! house this logic. The current layout looks like this:
+//!
+//! ```ignore
+//! # This is the root directory for all output, the top-level package
+//! # places all of its output here.
+//! target/
+//!
+//!     # This is the root directory for all output of *dependencies*
+//!     deps/
+//!
+//!     # Root directory for all compiled examples
+//!     examples/
+//!
+//!     # This is the location at which the output of all custom build
+//!     # commands are rooted
+//!     build/
+//!
+//!         # Each package gets its own directory where its build script and
+//!         # script output are placed
+//!         $pkg1/
+//!         $pkg2/
+//!         $pkg3/
+//!
+//!             # Each package directory has an `out` directory where output
+//!             # is placed.
+//!             out/
+//!
+//!     # This is the location at which the output of all old custom build
+//!     # commands are rooted
+//!     native/
+//!
+//!         # Each package gets its own directory for where its output is
+//!         # placed. We can't track exactly what's getting put in here, so
+//!         # we just assume that all relevant output is in these
+//!         # directories.
+//!         $pkg1/
+//!         $pkg2/
+//!         $pkg3/
+//!
+//!     # Directory used to store incremental data for the compiler (when
+//!     # incremental is enabled).
+//!     incremental/
+//!
+//!     # Hidden directory that holds all of the fingerprint files for all
+//!     # packages
+//!     .fingerprint/
+//! ```
+
+use std::fs;
+use std::io;
+use std::path::{PathBuf, Path};
+
+use core::Workspace;
+use util::{Config, FileLock, CargoResult, Filesystem};
+
+/// Contains the paths of all target output locations.
+///
+/// See module docs for more information.
+pub struct Layout {
+    root: PathBuf,
+    deps: PathBuf,
+    native: PathBuf,
+    build: PathBuf,
+    incremental: PathBuf,
+    fingerprint: PathBuf,
+    examples: PathBuf,
+    /// The lockfile for a build, will be unlocked when this struct is `drop`ped.
+    _lock: FileLock,
+}
+
+pub fn is_bad_artifact_name(name: &str) -> bool {
+    ["deps", "examples", "build", "native", "incremental"]
+        .iter()
+        .any(|&reserved| reserved == name)
+}
+
+impl Layout {
+    /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+    ///
+    /// This function will block if the directory is already locked.
+    ///
+    /// Differs from `at` in that this calculates the root path from the workspace target directory,
+    /// adding the target triple and the profile (debug, release, ...).
+    pub fn new(ws: &Workspace,
+               triple: Option<&str>,
+               dest: &str) -> CargoResult<Layout> {
+        let mut path = ws.target_dir();
+        // Flexible target specifications often point at filenames, so interpret
+        // the target triple as a Path and then just use the file stem as the
+        // component for the directory name.
+        if let Some(triple) = triple {
+            path.push(Path::new(triple).file_stem().ok_or_else(|| "target was empty")?);
+        }
+        path.push(dest);
+        Layout::at(ws.config(), path)
+    }
+
+    /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+    ///
+    /// This function will block if the directory is already locked.
+    pub fn at(config: &Config, root: Filesystem) -> CargoResult<Layout> {
+        // For now we don't do any finer-grained locking on the artifact
+        // directory, so just lock the entire thing for the duration of this
+        // compile.
+        let lock = root.open_rw(".cargo-lock", config, "build directory")?;
+        let root = root.into_path_unlocked();
+
+        Ok(Layout {
+            deps: root.join("deps"),
+            native: root.join("native"),
+            build: root.join("build"),
+            incremental: root.join("incremental"),
+            fingerprint: root.join(".fingerprint"),
+            examples: root.join("examples"),
+            root: root,
+            _lock: lock,
+        })
+    }
+
+    #[cfg(not(target_os = "macos"))]
+    fn exclude_from_backups(&self, _: &Path) {}
+
+    #[cfg(target_os = "macos")]
+    /// Marks files or directories as excluded from Time Machine on macOS
+    ///
+    /// This is recommended to prevent derived/temporary files from bloating backups.
+    fn exclude_from_backups(&self, path: &Path) {
+        use std::ptr;
+        use core_foundation::{url, number, string};
+        use core_foundation::base::TCFType;
+
+        // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
+        let is_excluded_key: Result<string::CFString, _> = "NSURLIsExcludedFromBackupKey".parse();
+        match (url::CFURL::from_path(path, false), is_excluded_key) {
+            (Some(path), Ok(is_excluded_key)) => unsafe {
+                url::CFURLSetResourcePropertyForKey(
+                    path.as_concrete_TypeRef(),
+                    is_excluded_key.as_concrete_TypeRef(),
+                    number::kCFBooleanTrue as *const _,
+                    ptr::null_mut(),
+                );
+            },
+            // Errors are ignored, since it's an optional feature and failure
+            // doesn't prevent Cargo from working
+            _ => {}
+        }
+    }
+
+    /// Make sure all directories stored in the Layout exist on the filesystem.
+    pub fn prepare(&mut self) -> io::Result<()> {
+        if fs::metadata(&self.root).is_err() {
+            fs::create_dir_all(&self.root)?;
+        }
+
+        self.exclude_from_backups(&self.root);
+
+        mkdir(&self.deps)?;
+        mkdir(&self.native)?;
+        mkdir(&self.incremental)?;
+        mkdir(&self.fingerprint)?;
+        mkdir(&self.examples)?;
+        mkdir(&self.build)?;
+
+        return Ok(());
+
+        fn mkdir(dir: &Path) -> io::Result<()> {
+            if fs::metadata(&dir).is_err() {
+                fs::create_dir(dir)?;
+            }
+            Ok(())
+        }
+    }
+
+    /// Fetch the root path.
+    pub fn dest(&self) -> &Path { &self.root }
+    /// Fetch the deps path.
+    pub fn deps(&self) -> &Path { &self.deps }
+    /// Fetch the examples path.
+    pub fn examples(&self) -> &Path { &self.examples }
+    /// Fetch the root path.
+    pub fn root(&self) -> &Path { &self.root }
+    /// Fetch the incremental path.
+    pub fn incremental(&self) -> &Path { &self.incremental }
+    /// Fetch the fingerprint path.
+    pub fn fingerprint(&self) -> &Path { &self.fingerprint }
+    /// Fetch the build path.
+    pub fn build(&self) -> &Path { &self.build }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/links.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/links.rs
new file mode 100644
index 000000000..79bb240cf
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/links.rs
@@ -0,0 +1,64 @@
+use std::collections::{HashMap, HashSet};
+use std::fmt::Write;
+
+use core::{Resolve, PackageId};
+use util::CargoResult;
+use super::Unit;
+
+pub struct Links<'a> {
+    validated: HashSet<&'a PackageId>,
+    links: HashMap<String, &'a PackageId>,
+}
+
+impl<'a> Links<'a> {
+    pub fn new() -> Links<'a> {
+        Links {
+            validated: HashSet::new(),
+            links: HashMap::new(),
+        }
+    }
+
+    pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> {
+        if !self.validated.insert(unit.pkg.package_id()) {
+            return Ok(())
+        }
+        let lib = match unit.pkg.manifest().links() {
+            Some(lib) => lib,
+            None => return Ok(()),
+        };
+        if let Some(prev) = self.links.get(lib) {
+            let pkg = unit.pkg.package_id();
+
+            let describe_path = |pkgid: &PackageId| -> String {
+                let dep_path = resolve.path_to_top(pkgid);
+                if dep_path.is_empty() {
+                    String::from("The root-package ")
+                } else {
+                    let mut dep_path_desc = format!("Package `{}`\n", pkgid);
+                    for dep in dep_path {
+                        write!(dep_path_desc,
+                               "    ... which is depended on by `{}`\n",
+                               dep).unwrap();
+                    }
+                    dep_path_desc
+                }
+            };
+
+            bail!("Multiple packages link to native library `{}`. \
+                   A native library can be linked only once.\n\
+                   \n\
+                   {}links to native library `{}`.\n\
+                   \n\
+                   {}also links to native library `{}`.",
+                  lib,
+                  describe_path(prev), lib,
+                  describe_path(pkg), lib)
+        }
+        if !unit.pkg.manifest().targets().iter().any(|t| t.is_custom_build()) {
+            bail!("package `{}` specifies that it links to `{}` but does not \
+                   have a custom build script", unit.pkg.package_id(), lib)
+        }
+        self.links.insert(lib.to_string(), unit.pkg.package_id());
+        Ok(())
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/mod.rs
new file mode 100644
index 000000000..8dce388c0
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/mod.rs
@@ -0,0 +1,941 @@
+use std::collections::{HashMap, HashSet};
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs;
+use std::io::{self, Write};
+use std::path::{self, PathBuf};
+use std::sync::Arc;
+
+use same_file::is_same_file;
+use serde_json;
+
+use core::{Package, PackageId, PackageSet, Target, Resolve};
+use core::{Profile, Profiles, Workspace};
+use core::shell::ColorChoice;
+use util::{self, ProcessBuilder, machine_message};
+use util::{Config, internal, profile, join_paths};
+use util::errors::{CargoResult, CargoResultExt};
+use util::Freshness;
+
+use self::job::{Job, Work};
+use self::job_queue::JobQueue;
+
+use self::output_depinfo::output_depinfo;
+
+pub use self::compilation::Compilation;
+pub use self::context::{Context, Unit, TargetFileType};
+pub use self::custom_build::{BuildOutput, BuildMap, BuildScripts};
+pub use self::layout::is_bad_artifact_name;
+
+mod compilation;
+mod context;
+mod custom_build;
+mod fingerprint;
+mod job;
+mod job_queue;
+mod layout;
+mod links;
+mod output_depinfo;
+
+/// Whether an object is for the host arch, or the target arch.
+///
+/// These will be the same unless cross-compiling.
+#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)] +pub enum Kind { Host, Target } + +/// Configuration information for a rustc build. +#[derive(Default, Clone)] +pub struct BuildConfig { + /// The host arch triple + /// + /// e.g. x86_64-unknown-linux-gnu, would be + /// - machine: x86_64 + /// - hardware-platform: unknown + /// - operating system: linux-gnu + pub host_triple: String, + /// Build information for the host arch + pub host: TargetConfig, + /// The target arch triple, defaults to host arch + pub requested_target: Option, + /// Build information for the target + pub target: TargetConfig, + /// How many rustc jobs to run in parallel + pub jobs: u32, + /// Whether we are building for release + pub release: bool, + /// Whether we are running tests + pub test: bool, + /// Whether we are building documentation + pub doc_all: bool, + /// Whether to print std output in json format (for machine reading) + pub json_messages: bool, +} + +/// Information required to build for a target +#[derive(Clone, Default)] +pub struct TargetConfig { + /// The path of archiver (lib builder) for this target. + pub ar: Option, + /// The path of the linker for this target. + pub linker: Option, + /// Special build options for any necessary input files (filename -> options) + pub overrides: HashMap, +} + +pub type PackagesToBuild<'a> = [(&'a Package, Vec<(&'a Target, &'a Profile)>)]; + +/// A glorified callback for executing calls to rustc. Rather than calling rustc +/// directly, we'll use an Executor, giving clients an opportunity to intercept +/// the build calls. +pub trait Executor: Send + Sync + 'static { + /// Called after a rustc process invocation is prepared up-front for a given + /// unit of work (may still be modified for runtime-known dependencies, when + /// the work is actually executed). + fn init(&self, _cx: &Context, _unit: &Unit) {} + + /// In case of an `Err`, Cargo will not continue with the build process for + /// this package. + fn exec(&self, + cmd: ProcessBuilder, + _id: &PackageId, + _target: &Target) + -> CargoResult<()> { + cmd.exec()?; + Ok(()) + } + + fn exec_json(&self, + cmd: ProcessBuilder, + _id: &PackageId, + _target: &Target, + handle_stdout: &mut FnMut(&str) -> CargoResult<()>, + handle_stderr: &mut FnMut(&str) -> CargoResult<()>) + -> CargoResult<()> { + cmd.exec_with_streaming(handle_stdout, handle_stderr, false)?; + Ok(()) + } + + /// Queried when queuing each unit of work. If it returns true, then the + /// unit will always be rebuilt, independent of whether it needs to be. + fn force_rebuild(&self, _unit: &Unit) -> bool { + false + } +} + +/// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's +/// default behaviour. +#[derive(Copy, Clone)] +pub struct DefaultExecutor; + +impl Executor for DefaultExecutor {} + +// Returns a mapping of the root package plus its immediate dependencies to +// where the compiled libraries are all located. 
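+//
+// (Rough shape of the flow, for orientation: build one `Unit` per
+// (package, target, profile, kind) tuple, prepare a `Context`, enqueue a
+// job per unit via `compile`, drain the `JobQueue`, then collect the
+// produced artifacts into the returned `Compilation`.)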
+pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, + pkg_targets: &'a PackagesToBuild<'a>, + packages: &'a PackageSet<'cfg>, + resolve: &'a Resolve, + config: &'cfg Config, + build_config: BuildConfig, + profiles: &'a Profiles, + exec: Arc) + -> CargoResult> { + let units = pkg_targets.iter().flat_map(|&(pkg, ref targets)| { + let default_kind = if build_config.requested_target.is_some() { + Kind::Target + } else { + Kind::Host + }; + targets.iter().map(move |&(target, profile)| { + Unit { + pkg: pkg, + target: target, + profile: profile, + kind: if target.for_host() {Kind::Host} else {default_kind}, + } + }) + }).collect::>(); + + let mut cx = Context::new(ws, resolve, packages, config, + build_config, profiles)?; + + let mut queue = JobQueue::new(&cx); + + cx.prepare()?; + cx.probe_target_info(&units)?; + cx.build_used_in_plugin_map(&units)?; + custom_build::build_map(&mut cx, &units)?; + + for unit in units.iter() { + // Build up a list of pending jobs, each of which represent + // compiling a particular package. No actual work is executed as + // part of this, that's all done next as part of the `execute` + // function which will run everything in order with proper + // parallelism. + compile(&mut cx, &mut queue, unit, Arc::clone(&exec))?; + } + + // Now that we've figured out everything that we're going to do, do it! + queue.execute(&mut cx)?; + + for unit in units.iter() { + for &(ref dst, ref link_dst, file_type) in cx.target_filenames(unit)?.iter() { + if file_type == TargetFileType::DebugInfo { + continue; + } + + let bindst = match *link_dst { + Some(ref link_dst) => link_dst, + None => dst, + }; + + if unit.profile.test { + cx.compilation.tests.push((unit.pkg.clone(), + unit.target.kind().clone(), + unit.target.name().to_string(), + dst.clone())); + } else if unit.target.is_bin() || unit.target.is_example() { + cx.compilation.binaries.push(bindst.clone()); + } else if unit.target.is_lib() { + let pkgid = unit.pkg.package_id().clone(); + cx.compilation.libraries.entry(pkgid).or_insert(HashSet::new()) + .insert((unit.target.clone(), dst.clone())); + } + } + + for dep in cx.dep_targets(unit)?.iter() { + if !unit.target.is_lib() { continue } + + if dep.profile.run_custom_build { + let out_dir = cx.build_script_out_dir(dep).display().to_string(); + cx.compilation.extra_env.entry(dep.pkg.package_id().clone()) + .or_insert(Vec::new()) + .push(("OUT_DIR".to_string(), out_dir)); + } + + if !dep.target.is_lib() { continue } + if dep.profile.doc { continue } + + let v = cx.target_filenames(dep)?; + cx.compilation.libraries + .entry(unit.pkg.package_id().clone()) + .or_insert(HashSet::new()) + .extend(v.iter().map(|&(ref f, _, _)| { + (dep.target.clone(), f.clone()) + })); + } + + let feats = cx.resolve.features(unit.pkg.package_id()); + cx.compilation.cfgs.entry(unit.pkg.package_id().clone()) + .or_insert_with(HashSet::new) + .extend(feats.iter().map(|feat| format!("feature=\"{}\"", feat))); + + output_depinfo(&mut cx, unit)?; + } + + for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() { + cx.compilation.cfgs.entry(pkg.clone()) + .or_insert_with(HashSet::new) + .extend(output.cfgs.iter().cloned()); + + cx.compilation.extra_env.entry(pkg.clone()) + .or_insert_with(Vec::new) + .extend(output.env.iter().cloned()); + + for dir in output.library_paths.iter() { + cx.compilation.native_dirs.insert(dir.clone()); + } + } + cx.compilation.target = cx.target_triple().to_string(); + Ok(cx.compilation) +} + +fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, + jobs: 
&mut JobQueue<'a>, + unit: &Unit<'a>, + exec: Arc) -> CargoResult<()> { + if !cx.compiled.insert(*unit) { + return Ok(()) + } + + // Build up the work to be done to compile this unit, enqueuing it once + // we've got everything constructed. + let p = profile::start(format!("preparing: {}/{}", unit.pkg, + unit.target.name())); + fingerprint::prepare_init(cx, unit)?; + cx.links.validate(cx.resolve, unit)?; + + let (dirty, fresh, freshness) = if unit.profile.run_custom_build { + custom_build::prepare(cx, unit)? + } else if unit.profile.doc && unit.profile.test { + // we run these targets later, so this is just a noop for now + (Work::noop(), Work::noop(), Freshness::Fresh) + } else { + let (mut freshness, dirty, fresh) = fingerprint::prepare_target(cx, unit)?; + let work = if unit.profile.doc { + rustdoc(cx, unit)? + } else { + rustc(cx, unit, Arc::clone(&exec))? + }; + // Need to link targets on both the dirty and fresh + let dirty = work.then(link_targets(cx, unit, false)?).then(dirty); + let fresh = link_targets(cx, unit, true)?.then(fresh); + + if exec.force_rebuild(unit) { + freshness = Freshness::Dirty; + } + + (dirty, fresh, freshness) + }; + jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)?; + drop(p); + + // Be sure to compile all dependencies of this target as well. + for unit in cx.dep_targets(unit)?.iter() { + compile(cx, jobs, unit, exec.clone())?; + } + + Ok(()) +} + +fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, + exec: Arc) -> CargoResult { + let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?; + + let name = unit.pkg.name().to_string(); + + // If this is an upstream dep we don't want warnings from, turn off all + // lints. + if !cx.show_warnings(unit.pkg.package_id()) { + rustc.arg("--cap-lints").arg("allow"); + + // If this is an upstream dep but we *do* want warnings, make sure that they + // don't fail compilation. + } else if !unit.pkg.package_id().source_id().is_path() { + rustc.arg("--cap-lints").arg("warn"); + } + + let filenames = cx.target_filenames(unit)?; + let root = cx.out_dir(unit); + let kind = unit.kind; + + // Prepare the native lib state (extra -L and -l flags) + let build_state = cx.build_state.clone(); + let current_id = unit.pkg.package_id().clone(); + let build_deps = load_build_deps(cx, unit); + + // If we are a binary and the package also contains a library, then we + // don't pass the `-l` flags. 
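+    // (Illustration: in a package with both `src/lib.rs` and `src/main.rs`,
+    // the binary links against the library target, which already carries the
+    // native `-l` flags, so repeating them for the binary would link those
+    // native libraries twice.)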
+    let pass_l_flag = unit.target.is_lib() ||
+                      !unit.pkg.targets().iter().any(|t| t.is_lib());
+    let do_rename = unit.target.allows_underscores() && !unit.profile.test;
+    let real_name = unit.target.name().to_string();
+    let crate_name = unit.target.crate_name();
+
+    // XXX(Rely on target_filenames iterator as source of truth rather than rederiving filestem)
+    let rustc_dep_info_loc = if do_rename && cx.target_metadata(unit).is_none() {
+        root.join(&crate_name)
+    } else {
+        root.join(&cx.file_stem(unit))
+    }.with_extension("d");
+    let dep_info_loc = fingerprint::dep_info_loc(cx, unit);
+    let cwd = cx.config.cwd().to_path_buf();
+
+    rustc.args(&cx.incremental_args(unit)?);
+    rustc.args(&cx.rustflags_args(unit)?);
+    let json_messages = cx.build_config.json_messages;
+    let package_id = unit.pkg.package_id().clone();
+    let target = unit.target.clone();
+
+    exec.init(cx, unit);
+    let exec = exec.clone();
+
+    let root_output = cx.target_root().to_path_buf();
+
+    return Ok(Work::new(move |state| {
+        // Only at runtime have we discovered what the extra -L and -l
+        // arguments are for native libraries, so we process those here. We
+        // also need to be sure to add any -L paths for our plugins to the
+        // dynamic library load path as a plugin's dynamic library may be
+        // located somewhere in there.
+        // Finally, if custom environment variables have been produced by
+        // previous build scripts, we include them in the rustc invocation.
+        if let Some(build_deps) = build_deps {
+            let build_state = build_state.outputs.lock().unwrap();
+            add_native_deps(&mut rustc, &build_state, &build_deps,
+                            pass_l_flag, &current_id)?;
+            add_plugin_deps(&mut rustc, &build_state, &build_deps,
+                            &root_output)?;
+            add_custom_env(&mut rustc, &build_state, &current_id, kind)?;
+        }
+
+        for &(ref filename, ref _link_dst, _linkable) in filenames.iter() {
+            // If there is both an rmeta and rlib, rustc will prefer to use the
+            // rlib, even if it is older. Therefore, we must delete the rlib to
+            // force using the new rmeta.
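+            // (For instance, `cargo check` leaves `libfoo.rmeta` in `deps/`;
+            // a later build that regenerates the rmeta must remove any stale
+            // `libfoo.rlib` next to it, or rustc would keep reading the old
+            // rlib. Illustrative file names.)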
+ if filename.extension() == Some(OsStr::new("rmeta")) { + let dst = root.join(filename).with_extension("rlib"); + if dst.exists() { + fs::remove_file(&dst).chain_err(|| { + format!("Could not remove file: {}.", dst.display()) + })?; + } + } + } + + state.running(&rustc); + if json_messages { + exec.exec_json(rustc, &package_id, &target, + &mut |line| if !line.is_empty() { + Err(internal(&format!("compiler stdout is not empty: `{}`", line))) + } else { + Ok(()) + }, + &mut |line| { + // stderr from rustc can have a mix of JSON and non-JSON output + if line.starts_with('{') { + // Handle JSON lines + let compiler_message = serde_json::from_str(line).map_err(|_| { + internal(&format!("compiler produced invalid json: `{}`", line)) + })?; + + machine_message::emit(&machine_message::FromCompiler { + package_id: &package_id, + target: &target, + message: compiler_message, + }); + } else { + // Forward non-JSON to stderr + writeln!(io::stderr(), "{}", line)?; + } + Ok(()) + } + ).chain_err(|| { + format!("Could not compile `{}`.", name) + })?; + } else { + exec.exec(rustc, &package_id, &target).map_err(|e| e.into_internal()).chain_err(|| { + format!("Could not compile `{}`.", name) + })?; + } + + if do_rename && real_name != crate_name { + let dst = &filenames[0].0; + let src = dst.with_file_name(dst.file_name().unwrap() + .to_str().unwrap() + .replace(&real_name, &crate_name)); + if src.exists() && src.file_name() != dst.file_name() { + fs::rename(&src, &dst).chain_err(|| { + internal(format!("could not rename crate {:?}", src)) + })?; + } + } + + if fs::metadata(&rustc_dep_info_loc).is_ok() { + info!("Renaming dep_info {:?} to {:?}", rustc_dep_info_loc, dep_info_loc); + fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_err(|| { + internal(format!("could not rename dep info: {:?}", + rustc_dep_info_loc)) + })?; + fingerprint::append_current_dir(&dep_info_loc, &cwd)?; + } + + Ok(()) + })); + + // Add all relevant -L and -l flags from dependencies (now calculated and + // present in `state`) to the command provided + fn add_native_deps(rustc: &mut ProcessBuilder, + build_state: &BuildMap, + build_scripts: &BuildScripts, + pass_l_flag: bool, + current_id: &PackageId) -> CargoResult<()> { + for key in build_scripts.to_link.iter() { + let output = build_state.get(key).ok_or_else(|| { + internal(format!("couldn't find build state for {}/{:?}", + key.0, key.1)) + })?; + for path in output.library_paths.iter() { + rustc.arg("-L").arg(path); + } + if key.0 == *current_id { + for cfg in &output.cfgs { + rustc.arg("--cfg").arg(cfg); + } + if pass_l_flag { + for name in output.library_links.iter() { + rustc.arg("-l").arg(name); + } + } + } + } + Ok(()) + } + + // Add all custom environment variables present in `state` (after they've + // been put there by one of the `build_scripts`) to the command provided. + fn add_custom_env(rustc: &mut ProcessBuilder, + build_state: &BuildMap, + current_id: &PackageId, + kind: Kind) -> CargoResult<()> { + let key = (current_id.clone(), kind); + if let Some(output) = build_state.get(&key) { + for &(ref name, ref value) in output.env.iter() { + rustc.env(name, value); + } + } + Ok(()) + } +} + +/// Link the compiled target (often of form `foo-{metadata_hash}`) to the +/// final target. 
This must happen during both "Fresh" and "Compile" +fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, + fresh: bool) -> CargoResult { + let filenames = cx.target_filenames(unit)?; + let package_id = unit.pkg.package_id().clone(); + let target = unit.target.clone(); + let profile = unit.profile.clone(); + let features = cx.resolve.features_sorted(&package_id).into_iter() + .map(|s| s.to_owned()) + .collect(); + let json_messages = cx.build_config.json_messages; + + Ok(Work::new(move |_| { + // If we're a "root crate", e.g. the target of this compilation, then we + // hard link our outputs out of the `deps` directory into the directory + // above. This means that `cargo build` will produce binaries in + // `target/debug` which one probably expects. + let mut destinations = vec![]; + for &(ref src, ref link_dst, _file_type) in filenames.iter() { + // This may have been a `cargo rustc` command which changes the + // output, so the source may not actually exist. + if !src.exists() { + continue + } + let dst = match link_dst.as_ref() { + Some(dst) => dst, + None => { + destinations.push(src.display().to_string()); + continue; + } + }; + destinations.push(dst.display().to_string()); + + debug!("linking {} to {}", src.display(), dst.display()); + if is_same_file(src, dst).unwrap_or(false) { + continue + } + if dst.exists() { + fs::remove_file(&dst).chain_err(|| { + format!("failed to remove: {}", dst.display()) + })?; + } + + let link_result = if src.is_dir() { + #[cfg(unix)] + use std::os::unix::fs::symlink; + #[cfg(target_os = "redox")] + use std::os::redox::fs::symlink; + #[cfg(windows)] + use std::os::windows::fs::symlink_dir as symlink; + + symlink(src, dst) + } else { + fs::hard_link(src, dst) + }; + link_result + .or_else(|err| { + debug!("link failed {}. falling back to fs::copy", err); + fs::copy(src, dst).map(|_| ()) + }) + .chain_err(|| { + format!("failed to link or copy `{}` to `{}`", + src.display(), dst.display()) + })?; + } + + if json_messages { + machine_message::emit(&machine_message::Artifact { + package_id: &package_id, + target: &target, + profile: &profile, + features: features, + filenames: destinations, + fresh: fresh, + }); + } + Ok(()) + })) +} + +fn load_build_deps(cx: &Context, unit: &Unit) -> Option> { + cx.build_scripts.get(unit).cloned() +} + +// For all plugin dependencies, add their -L paths (now calculated and +// present in `state`) to the dynamic library load path for the command to +// execute. +fn add_plugin_deps(rustc: &mut ProcessBuilder, + build_state: &BuildMap, + build_scripts: &BuildScripts, + root_output: &PathBuf) + -> CargoResult<()> { + let var = util::dylib_path_envvar(); + let search_path = rustc.get_env(var).unwrap_or_default(); + let mut search_path = env::split_paths(&search_path).collect::>(); + for id in build_scripts.plugins.iter() { + let key = (id.clone(), Kind::Host); + let output = build_state.get(&key).ok_or_else(|| { + internal(format!("couldn't find libs for plugin dep {}", id)) + })?; + search_path.append(&mut filter_dynamic_search_path(output.library_paths.iter(), + root_output)); + } + let search_path = join_paths(&search_path, var)?; + rustc.env(var, &search_path); + Ok(()) +} + +// Determine paths to add to the dynamic search path from -L entries +// +// Strip off prefixes like "native=" or "framework=" and filter out directories +// *not* inside our output directory since they are likely spurious and can cause +// clashes with system shared libraries (issue #3366). 
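+//
+// (For instance, an `-L native=/usr/lib` entry becomes `/usr/lib` after
+// stripping and is then discarded because it lies outside the target root,
+// while `-L dependency=target/debug/deps` is kept. Illustrative paths.)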
+fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &PathBuf) -> Vec<PathBuf>
+    where I: Iterator<Item = &'a PathBuf> {
+    let mut search_path = vec![];
+    for dir in paths {
+        let dir = match dir.to_str() {
+            Some(s) => {
+                let mut parts = s.splitn(2, '=');
+                match (parts.next(), parts.next()) {
+                    (Some("native"), Some(path)) |
+                    (Some("crate"), Some(path)) |
+                    (Some("dependency"), Some(path)) |
+                    (Some("framework"), Some(path)) |
+                    (Some("all"), Some(path)) => path.into(),
+                    _ => dir.clone(),
+                }
+            }
+            None => dir.clone(),
+        };
+        if dir.starts_with(&root_output) {
+            search_path.push(dir);
+        } else {
+            debug!("Not including path {} in runtime library search path because it is \
+                    outside target root {}", dir.display(), root_output.display());
+        }
+    }
+    search_path
+}
+
+fn prepare_rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
+                           crate_types: &[&str],
+                           unit: &Unit<'a>) -> CargoResult<ProcessBuilder> {
+    let mut base = cx.compilation.rustc_process(unit.pkg)?;
+    base.inherit_jobserver(&cx.jobserver);
+    build_base_args(cx, &mut base, unit, crate_types);
+    build_deps_args(&mut base, cx, unit)?;
+    Ok(base)
+}
+
+
+fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
+                     unit: &Unit<'a>) -> CargoResult<Work> {
+    let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg)?;
+    rustdoc.inherit_jobserver(&cx.jobserver);
+    rustdoc.arg("--crate-name").arg(&unit.target.crate_name())
+           .cwd(cx.config.cwd())
+           .arg(&root_path(cx, unit));
+
+    if unit.kind != Kind::Host {
+        if let Some(target) = cx.requested_target() {
+            rustdoc.arg("--target").arg(target);
+        }
+    }
+
+    let doc_dir = cx.out_dir(unit);
+
+    // Create the documentation directory ahead of time as rustdoc currently has
+    // a bug where concurrent invocations will race to create this directory if
+    // it doesn't already exist.
+    fs::create_dir_all(&doc_dir)?;
+
+    rustdoc.arg("-o").arg(doc_dir);
+
+    for feat in cx.resolve.features_sorted(unit.pkg.package_id()) {
+        rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
+    }
+
+    if let Some(ref args) = unit.profile.rustdoc_args {
+        rustdoc.args(args);
+    }
+
+    build_deps_args(&mut rustdoc, cx, unit)?;
+
+    rustdoc.args(&cx.rustdocflags_args(unit)?);
+
+    let name = unit.pkg.name().to_string();
+    let build_state = cx.build_state.clone();
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+
+    Ok(Work::new(move |state| {
+        if let Some(output) = build_state.outputs.lock().unwrap().get(&key) {
+            for cfg in output.cfgs.iter() {
+                rustdoc.arg("--cfg").arg(cfg);
+            }
+            for &(ref name, ref value) in output.env.iter() {
+                rustdoc.env(name, value);
+            }
+        }
+        state.running(&rustdoc);
+        rustdoc.exec().chain_err(|| format!("Could not document `{}`.", name))
+    }))
+}
+
+// The path that we pass to rustc is actually fairly important because it will
+// show up in error messages and the like. For this reason we take a few moments
+// to ensure that something shows up pretty reasonably.
+//
+// The heuristic here is fairly simple, but the key idea is that the path is
+// always "relative" to the current directory in order to be found easily. The
+// path is only actually relative if the current directory is an ancestor of it.
+// This means that non-path dependencies (git/registry) will likely be shown as
+// absolute paths instead of relative paths.
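+//
+// (Say the workspace sits at `/work` and that is also the current directory:
+// a member crate's root is rendered as `src/lib.rs`, while a registry
+// dependency under `~/.cargo/registry/...` stays absolute. Illustrative paths.)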
+fn root_path(cx: &Context, unit: &Unit) -> PathBuf { + let absolute = unit.pkg.root().join(unit.target.src_path()); + let cwd = cx.config.cwd(); + if absolute.starts_with(cwd) { + util::without_prefix(&absolute, cwd).map(|s| { + s.to_path_buf() + }).unwrap_or(absolute) + } else { + absolute + } +} + +fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, + cmd: &mut ProcessBuilder, + unit: &Unit<'a>, + crate_types: &[&str]) { + let Profile { + ref opt_level, lto, codegen_units, ref rustc_args, debuginfo, + debug_assertions, overflow_checks, rpath, test, doc: _doc, + run_custom_build, ref panic, rustdoc_args: _, check, + } = *unit.profile; + assert!(!run_custom_build); + + // Move to cwd so the root_path() passed below is actually correct + cmd.cwd(cx.config.cwd()); + + cmd.arg("--crate-name").arg(&unit.target.crate_name()); + + cmd.arg(&root_path(cx, unit)); + + match cx.config.shell().color_choice() { + ColorChoice::Always => { cmd.arg("--color").arg("always"); } + ColorChoice::Never => { cmd.arg("--color").arg("never"); } + ColorChoice::CargoAuto => {} + } + + if cx.build_config.json_messages { + cmd.arg("--error-format").arg("json"); + } + + if !test { + for crate_type in crate_types.iter() { + cmd.arg("--crate-type").arg(crate_type); + } + } + + if check { + cmd.arg("--emit=dep-info,metadata"); + } else { + cmd.arg("--emit=dep-info,link"); + } + + let prefer_dynamic = (unit.target.for_host() && + !unit.target.is_custom_build()) || + (crate_types.contains(&"dylib") && + cx.ws.members().any(|p| p != unit.pkg)); + if prefer_dynamic { + cmd.arg("-C").arg("prefer-dynamic"); + } + + if opt_level != "0" { + cmd.arg("-C").arg(&format!("opt-level={}", opt_level)); + } + + // If a panic mode was configured *and* we're not ever going to be used in a + // plugin, then we can compile with that panic mode. + // + // If we're used in a plugin then we'll eventually be linked to libsyntax + // most likely which isn't compiled with a custom panic mode, so we'll just + // get an error if we actually compile with that. This fixes `panic=abort` + // crates which have plugin dependencies, but unfortunately means that + // dependencies shared between the main application and plugins must be + // compiled without `panic=abort`. This isn't so bad, though, as the main + // application will still be compiled with `panic=abort`. + if let Some(panic) = panic.as_ref() { + if !cx.used_in_plugin.contains(unit) { + cmd.arg("-C").arg(format!("panic={}", panic)); + } + } + + // Disable LTO for host builds as prefer_dynamic and it are mutually + // exclusive. + if unit.target.can_lto() && lto && !unit.target.for_host() { + cmd.args(&["-C", "lto"]); + } else if let Some(n) = codegen_units { + // There are some restrictions with LTO and codegen-units, so we + // only add codegen units when LTO is not used. + cmd.arg("-C").arg(&format!("codegen-units={}", n)); + } + + if let Some(debuginfo) = debuginfo { + cmd.arg("-C").arg(format!("debuginfo={}", debuginfo)); + } + + if let Some(ref args) = *rustc_args { + cmd.args(args); + } + + // -C overflow-checks is implied by the setting of -C debug-assertions, + // so we only need to provide -C overflow-checks if it differs from + // the value of -C debug-assertions we would provide. 
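+    //
+    // (Worked example: with a non-zero opt-level, `debug-assertions` on and
+    // `overflow-checks` off, the branch below emits
+    // `-C debug-assertions=on -C overflow-checks=off`; in the all-default
+    // debug case nothing extra is emitted.)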
+ if opt_level != "0" { + if debug_assertions { + cmd.args(&["-C", "debug-assertions=on"]); + if !overflow_checks { + cmd.args(&["-C", "overflow-checks=off"]); + } + } else if overflow_checks { + cmd.args(&["-C", "overflow-checks=on"]); + } + } else if !debug_assertions { + cmd.args(&["-C", "debug-assertions=off"]); + if overflow_checks { + cmd.args(&["-C", "overflow-checks=on"]); + } + } else if !overflow_checks { + cmd.args(&["-C", "overflow-checks=off"]); + } + + if test && unit.target.harness() { + cmd.arg("--test"); + } else if test { + cmd.arg("--cfg").arg("test"); + } + + // We ideally want deterministic invocations of rustc to ensure that + // rustc-caching strategies like sccache are able to cache more, so sort the + // feature list here. + for feat in cx.resolve.features_sorted(unit.pkg.package_id()) { + cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + } + + match cx.target_metadata(unit) { + Some(m) => { + cmd.arg("-C").arg(&format!("metadata={}", m)); + cmd.arg("-C").arg(&format!("extra-filename=-{}", m)); + } + None => { + cmd.arg("-C").arg(&format!("metadata={}", cx.target_short_hash(unit))); + } + } + + if rpath { + cmd.arg("-C").arg("rpath"); + } + + cmd.arg("--out-dir").arg(&cx.out_dir(unit)); + + fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, + val: Option<&OsStr>) { + if let Some(val) = val { + let mut joined = OsString::from(prefix); + joined.push(val); + cmd.arg(key).arg(joined); + } + } + + if unit.kind == Kind::Target { + opt(cmd, "--target", "", cx.requested_target().map(|s| s.as_ref())); + } + + opt(cmd, "-C", "ar=", cx.ar(unit.kind).map(|s| s.as_ref())); + opt(cmd, "-C", "linker=", cx.linker(unit.kind).map(|s| s.as_ref())); +} + + +fn build_deps_args<'a, 'cfg>(cmd: &mut ProcessBuilder, + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>) -> CargoResult<()> { + cmd.arg("-L").arg(&{ + let mut deps = OsString::from("dependency="); + deps.push(cx.deps_dir(unit)); + deps + }); + + // Be sure that the host path is also listed. This'll ensure that proc-macro + // dependencies are correctly found (for reexported macros). 
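+    // (e.g. when building with `--target`, a re-exported macro from a
+    // proc-macro crate lives in the host `deps` directory, so that directory
+    // is added as an extra `-L dependency=` path below.)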
+ if let Kind::Target = unit.kind { + cmd.arg("-L").arg(&{ + let mut deps = OsString::from("dependency="); + deps.push(cx.host_deps()); + deps + }); + } + + for unit in cx.dep_targets(unit)?.iter() { + if unit.profile.run_custom_build { + cmd.env("OUT_DIR", &cx.build_script_out_dir(unit)); + } + if unit.target.linkable() && !unit.profile.doc { + link_to(cmd, cx, unit)?; + } + } + + return Ok(()); + + fn link_to<'a, 'cfg>(cmd: &mut ProcessBuilder, + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>) -> CargoResult<()> { + for &(ref dst, _, file_type) in cx.target_filenames(unit)?.iter() { + if file_type != TargetFileType::Linkable { + continue + } + let mut v = OsString::new(); + v.push(&unit.target.crate_name()); + v.push("="); + v.push(cx.out_dir(unit)); + v.push(&path::MAIN_SEPARATOR.to_string()); + v.push(&dst.file_name().unwrap()); + cmd.arg("--extern").arg(&v); + } + Ok(()) + } +} + +fn envify(s: &str) -> String { + s.chars() + .flat_map(|c| c.to_uppercase()) + .map(|c| if c == '-' {'_'} else {c}) + .collect() +} + +impl Kind { + fn for_target(&self, target: &Target) -> Kind { + // Once we start compiling for the `Host` kind we continue doing so, but + // if we are a `Target` kind and then we start compiling for a target + // that needs to be on the host we lift ourselves up to `Host` + match *self { + Kind::Host => Kind::Host, + Kind::Target if target.for_host() => Kind::Host, + Kind::Target => Kind::Target, + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/output_depinfo.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/output_depinfo.rs new file mode 100644 index 000000000..b07b299f0 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_rustc/output_depinfo.rs @@ -0,0 +1,95 @@ +use std::collections::HashSet; +use std::io::{Write, BufWriter, ErrorKind}; +use std::fs::{self, File}; +use std::path::{Path, PathBuf}; + +use ops::{Context, Unit}; +use util::{CargoResult, internal}; +use ops::cargo_rustc::fingerprint; + +fn render_filename>(path: P, basedir: Option<&str>) -> CargoResult { + let path = path.as_ref(); + let relpath = match basedir { + None => path, + Some(base) => match path.strip_prefix(base) { + Ok(relpath) => relpath, + _ => path, + } + }; + relpath.to_str().ok_or_else(|| internal("path not utf-8")).map(|f| f.replace(" ", "\\ ")) +} + +fn add_deps_for_unit<'a, 'b>( + deps: &mut HashSet, + context: &mut Context<'a, 'b>, + unit: &Unit<'a>, + visited: &mut HashSet>, +) + -> CargoResult<()> +{ + if !visited.insert(*unit) { + return Ok(()); + } + + // units representing the execution of a build script don't actually + // generate a dep info file, so we just keep on going below + if !unit.profile.run_custom_build { + // Add dependencies from rustc dep-info output (stored in fingerprint directory) + let dep_info_loc = fingerprint::dep_info_loc(context, unit); + if let Some(paths) = fingerprint::parse_dep_info(&dep_info_loc)? { + for path in paths { + deps.insert(path); + } + } else { + debug!("can't find dep_info for {:?} {:?}", + unit.pkg.package_id(), unit.profile); + return Err(internal("dep_info missing")); + } + } + + // Add rerun-if-changed dependencies + let key = (unit.pkg.package_id().clone(), unit.kind); + if let Some(output) = context.build_state.outputs.lock().unwrap().get(&key) { + for path in &output.rerun_if_changed { + deps.insert(path.into()); + } + } + + // Recursively traverse all transitive dependencies + for dep_unit in &context.dep_targets(unit)? 
{ + let source_id = dep_unit.pkg.package_id().source_id(); + if source_id.is_path() { + add_deps_for_unit(deps, context, dep_unit, visited)?; + } + } + Ok(()) +} + +pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) -> CargoResult<()> { + let mut deps = HashSet::new(); + let mut visited = HashSet::new(); + let success = add_deps_for_unit(&mut deps, context, unit, &mut visited).is_ok(); + let basedir = None; // TODO + for &(_, ref link_dst, _) in context.target_filenames(unit)?.iter() { + if let Some(ref link_dst) = *link_dst { + let output_path = link_dst.with_extension("d"); + if success { + let mut outfile = BufWriter::new(File::create(output_path)?); + let target_fn = render_filename(link_dst, basedir)?; + write!(outfile, "{}:", target_fn)?; + for dep in &deps { + write!(outfile, " {}", render_filename(dep, basedir)?)?; + } + writeln!(outfile, "")?; + } else if let Err(err) = fs::remove_file(output_path) { + // dep-info generation failed, so delete output file. This will usually + // cause the build system to always rerun the build rule, which is correct + // if inefficient. + if err.kind() != ErrorKind::NotFound { + return Err(err.into()); + } + } + } + } + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_test.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_test.rs new file mode 100644 index 000000000..f808ff5e1 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/cargo_test.rs @@ -0,0 +1,214 @@ +use std::ffi::{OsString, OsStr}; + +use ops::{self, Compilation}; +use util::{self, CargoTestError, Test, ProcessError}; +use util::errors::{CargoResult, CargoErrorKind, CargoError}; +use core::Workspace; + +pub struct TestOptions<'a> { + pub compile_opts: ops::CompileOptions<'a>, + pub no_run: bool, + pub no_fail_fast: bool, + pub only_doc: bool, +} + +pub fn run_tests(ws: &Workspace, + options: &TestOptions, + test_args: &[String]) -> CargoResult> { + let compilation = compile_tests(ws, options)?; + + if options.no_run { + return Ok(None) + } + let (test, mut errors) = if options.only_doc { + assert!(options.compile_opts.filter.is_specific()); + run_doc_tests(options, test_args, &compilation)? + } else { + run_unit_tests(options, test_args, &compilation)? + }; + + // If we have an error and want to fail fast, return + if !errors.is_empty() && !options.no_fail_fast { + return Ok(Some(CargoTestError::new(test, errors))) + } + + // If a specific test was requested or we're not running any tests at all, + // don't run any doc tests. 
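+    // (e.g. `cargo test --test integration` names a specific target, so the
+    // doc tests are skipped entirely rather than run in addition.)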
+ if options.compile_opts.filter.is_specific() { + match errors.len() { + 0 => return Ok(None), + _ => return Ok(Some(CargoTestError::new(test, errors))) + } + } + + let (doctest, docerrors) = run_doc_tests(options, test_args, &compilation)?; + let test = if docerrors.is_empty() { test } else { doctest }; + errors.extend(docerrors); + if errors.is_empty() { + Ok(None) + } else { + Ok(Some(CargoTestError::new(test, errors))) + } +} + +pub fn run_benches(ws: &Workspace, + options: &TestOptions, + args: &[String]) -> CargoResult> { + let mut args = args.to_vec(); + args.push("--bench".to_string()); + let compilation = compile_tests(ws, options)?; + + if options.no_run { + return Ok(None) + } + let (test, errors) = run_unit_tests(options, &args, &compilation)?; + match errors.len() { + 0 => Ok(None), + _ => Ok(Some(CargoTestError::new(test, errors))), + } +} + +fn compile_tests<'a>(ws: &Workspace<'a>, + options: &TestOptions<'a>) + -> CargoResult> { + let mut compilation = ops::compile(ws, &options.compile_opts)?; + compilation.tests.sort_by(|a, b| { + (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2)) + }); + Ok(compilation) +} + +/// Run the unit and integration tests of a project. +fn run_unit_tests(options: &TestOptions, + test_args: &[String], + compilation: &Compilation) + -> CargoResult<(Test, Vec)> { + let config = options.compile_opts.config; + let cwd = options.compile_opts.config.cwd(); + + let mut errors = Vec::new(); + + for &(ref pkg, ref kind, ref test, ref exe) in &compilation.tests { + let to_display = match util::without_prefix(exe, cwd) { + Some(path) => path, + None => &**exe, + }; + let mut cmd = compilation.target_process(exe, pkg)?; + cmd.args(test_args); + config.shell().concise(|shell| { + shell.status("Running", to_display.display().to_string()) + })?; + config.shell().verbose(|shell| { + shell.status("Running", cmd.to_string()) + })?; + + let result = cmd.exec(); + + match result { + Err(CargoError(CargoErrorKind::ProcessErrorKind(e), .. 
)) => { + errors.push((kind.clone(), test.clone(), e)); + if !options.no_fail_fast { + break; + } + } + Err(e) => { + //This is an unexpected Cargo error rather than a test failure + return Err(e) + } + Ok(()) => {} + } + } + + if errors.len() == 1 { + let (kind, test, e) = errors.pop().unwrap(); + Ok((Test::UnitTest(kind, test), vec![e])) + } else { + Ok((Test::Multiple, errors.into_iter().map((|(_, _, e)| e)).collect())) + } +} + +fn run_doc_tests(options: &TestOptions, + test_args: &[String], + compilation: &Compilation) + -> CargoResult<(Test, Vec)> { + let mut errors = Vec::new(); + let config = options.compile_opts.config; + + // We don't build/rust doctests if target != host + if config.rustc()?.host != compilation.target { + return Ok((Test::Doc, errors)); + } + + let libs = compilation.to_doc_test.iter().map(|package| { + (package, package.targets().iter().filter(|t| t.doctested()) + .map(|t| (t.src_path(), t.name(), t.crate_name()))) + }); + + for (package, tests) in libs { + for (lib, name, crate_name) in tests { + config.shell().status("Doc-tests", name)?; + let mut p = compilation.rustdoc_process(package)?; + p.arg("--test").arg(lib) + .arg("--crate-name").arg(&crate_name); + + for &rust_dep in &[&compilation.deps_output] { + let mut arg = OsString::from("dependency="); + arg.push(rust_dep); + p.arg("-L").arg(arg); + } + + for native_dep in compilation.native_dirs.iter() { + p.arg("-L").arg(native_dep); + } + + for &host_rust_dep in &[&compilation.host_deps_output] { + let mut arg = OsString::from("dependency="); + arg.push(host_rust_dep); + p.arg("-L").arg(arg); + } + + for arg in test_args { + p.arg("--test-args").arg(arg); + } + + if let Some(cfgs) = compilation.cfgs.get(package.package_id()) { + for cfg in cfgs.iter() { + p.arg("--cfg").arg(cfg); + } + } + + let libs = &compilation.libraries[package.package_id()]; + for &(ref target, ref lib) in libs.iter() { + // Note that we can *only* doctest rlib outputs here. A + // staticlib output cannot be linked by the compiler (it just + // doesn't do that). A dylib output, however, can be linked by + // the compiler, but will always fail. Currently all dylibs are + // built as "static dylibs" where the standard library is + // statically linked into the dylib. The doc tests fail, + // however, for now as they try to link the standard library + // dynamically as well, causing problems. As a result we only + // pass `--extern` for rlib deps and skip out on all other + // artifacts. + if lib.extension() != Some(OsStr::new("rlib")) && + !target.for_host() { + continue + } + let mut arg = OsString::from(target.crate_name()); + arg.push("="); + arg.push(lib); + p.arg("--extern").arg(&arg); + } + + config.shell().verbose(|shell| { + shell.status("Running", p.to_string()) + })?; + if let Err(CargoError(CargoErrorKind::ProcessErrorKind(e), .. 
)) = p.exec() { + errors.push(e); + if !options.no_fail_fast { + return Ok((Test::Doc, errors)); + } + } + } + } + Ok((Test::Doc, errors)) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/lockfile.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/lockfile.rs new file mode 100644 index 000000000..7368bbf8a --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/lockfile.rs @@ -0,0 +1,149 @@ +use std::io::prelude::*; + +use toml; + +use core::{Resolve, resolver, Workspace}; +use core::resolver::WorkspaceResolve; +use util::Filesystem; +use util::errors::{CargoResult, CargoResultExt}; +use util::toml as cargo_toml; + +pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult> { + if !ws.root().join("Cargo.lock").exists() { + return Ok(None) + } + + let root = Filesystem::new(ws.root().to_path_buf()); + let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?; + + let mut s = String::new(); + f.read_to_string(&mut s).chain_err(|| { + format!("failed to read file: {}", f.path().display()) + })?; + + (|| -> CargoResult> { + let resolve : toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; + let v: resolver::EncodableResolve = resolve.try_into()?; + Ok(Some(v.into_resolve(ws)?)) + })().chain_err(|| { + format!("failed to parse lock file at: {}", f.path().display()) + }) +} + +pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> { + // Load the original lockfile if it exists. + let ws_root = Filesystem::new(ws.root().to_path_buf()); + let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file"); + let orig = orig.and_then(|mut f| { + let mut s = String::new(); + f.read_to_string(&mut s)?; + Ok(s) + }); + + let toml = toml::Value::try_from(WorkspaceResolve { ws, resolve }).unwrap(); + + let mut out = String::new(); + + let deps = toml["package"].as_array().unwrap(); + for dep in deps.iter() { + let dep = dep.as_table().unwrap(); + + out.push_str("[[package]]\n"); + emit_package(dep, &mut out); + } + + if let Some(patch) = toml.get("patch") { + let list = patch["unused"].as_array().unwrap(); + for entry in list { + out.push_str("[[patch.unused]]\n"); + emit_package(entry.as_table().unwrap(), &mut out); + out.push_str("\n"); + } + } + + if let Some(meta) = toml.get("metadata") { + out.push_str("[metadata]\n"); + out.push_str(&meta.to_string()); + } + + // If the lockfile contents haven't changed so don't rewrite it. This is + // helpful on read-only filesystems. + if let Ok(orig) = orig { + if are_equal_lockfiles(orig, &out, ws) { + return Ok(()) + } + } + + if !ws.config().lock_update_allowed() { + let flag = if ws.config().network_allowed() {"--locked"} else {"--frozen"}; + bail!("the lock file needs to be updated but {} was passed to \ + prevent this", flag); + } + + // Ok, if that didn't work just write it out + ws_root.open_rw("Cargo.lock", ws.config(), "Cargo.lock file").and_then(|mut f| { + f.file().set_len(0)?; + f.write_all(out.as_bytes())?; + Ok(()) + }).chain_err(|| { + format!("failed to write {}", + ws.root().join("Cargo.lock").display()) + }) +} + +fn are_equal_lockfiles(mut orig: String, current: &str, ws: &Workspace) -> bool { + if has_crlf_line_endings(&orig) { + orig = orig.replace("\r\n", "\n"); + } + + // If we want to try and avoid updating the lockfile, parse both and + // compare them; since this is somewhat expensive, don't do it in the + // common case where we can update lockfiles. 
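+    // (e.g. under `--locked`, an on-disk lockfile that differs only in
+    // formatting or CRLF line endings still parses to an equal `Resolve`,
+    // so it is accepted without being rewritten.)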
+ if !ws.config().lock_update_allowed() { + let res: CargoResult = (|| { + let old: resolver::EncodableResolve = toml::from_str(&orig)?; + let new: resolver::EncodableResolve = toml::from_str(current)?; + Ok(old.into_resolve(ws)? == new.into_resolve(ws)?) + })(); + if let Ok(true) = res { + return true; + } + } + + current == orig +} + +fn has_crlf_line_endings(s: &str) -> bool { + // Only check the first line. + if let Some(lf) = s.find('\n') { + s[..lf].ends_with('\r') + } else { + false + } +} + +fn emit_package(dep: &toml::value::Table, out: &mut String) { + out.push_str(&format!("name = {}\n", &dep["name"])); + out.push_str(&format!("version = {}\n", &dep["version"])); + + if dep.contains_key("source") { + out.push_str(&format!("source = {}\n", &dep["source"])); + } + + if let Some(s) = dep.get("dependencies") { + let slice = s.as_array().unwrap(); + + if !slice.is_empty() { + out.push_str("dependencies = [\n"); + + for child in slice.iter() { + out.push_str(&format!(" {},\n", child)); + } + + out.push_str("]\n"); + } + out.push_str("\n"); + } else if dep.contains_key("replace") { + out.push_str(&format!("replace = {}\n\n", &dep["replace"])); + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/mod.rs new file mode 100644 index 000000000..0cd1ec718 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/mod.rs @@ -0,0 +1,43 @@ +pub use self::cargo_clean::{clean, CleanOptions}; +pub use self::cargo_compile::{compile, compile_with_exec, compile_ws, CompileOptions}; +pub use self::cargo_compile::{CompileFilter, CompileMode, FilterRule, MessageFormat, Packages}; +pub use self::cargo_read_manifest::{read_package, read_packages}; +pub use self::cargo_rustc::{compile_targets, Compilation, Kind, Unit}; +pub use self::cargo_rustc::{Context, is_bad_artifact_name}; +pub use self::cargo_rustc::{BuildOutput, BuildConfig, TargetConfig}; +pub use self::cargo_rustc::{Executor, DefaultExecutor}; +pub use self::cargo_run::run; +pub use self::cargo_install::{install, install_list, uninstall}; +pub use self::cargo_new::{new, init, NewOptions, VersionControl}; +pub use self::cargo_doc::{doc, DocOptions}; +pub use self::cargo_generate_lockfile::{generate_lockfile}; +pub use self::cargo_generate_lockfile::{update_lockfile}; +pub use self::cargo_generate_lockfile::UpdateOptions; +pub use self::lockfile::{load_pkg_lockfile, write_pkg_lockfile}; +pub use self::cargo_test::{run_tests, run_benches, TestOptions}; +pub use self::cargo_package::{package, PackageOpts}; +pub use self::registry::{publish, registry_configuration, RegistryConfig}; +pub use self::registry::{registry_login, search, http_proxy_exists, http_handle}; +pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts}; +pub use self::cargo_fetch::fetch; +pub use self::cargo_pkgid::pkgid; +pub use self::resolve::{resolve_ws, resolve_ws_precisely, resolve_with_previous}; +pub use self::cargo_output_metadata::{output_metadata, OutputMetadataOptions, ExportInfo}; + +mod cargo_clean; +mod cargo_compile; +mod cargo_doc; +mod cargo_fetch; +mod cargo_generate_lockfile; +mod cargo_install; +mod cargo_new; +mod cargo_output_metadata; +mod cargo_package; +mod cargo_pkgid; +mod cargo_read_manifest; +mod cargo_run; +mod cargo_rustc; +mod cargo_test; +mod lockfile; +mod registry; +mod resolve; diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/registry.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/registry.rs new file mode 100644 index 
000000000..42ff2f872
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/ops/registry.rs
@@ -0,0 +1,445 @@
+use std::env;
+use std::fs::{self, File};
+use std::iter::repeat;
+use std::time::Duration;
+
+use curl::easy::{Easy, SslOpt};
+use git2;
+use registry::{Registry, NewCrate, NewCrateDependency};
+
+use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
+
+use version;
+use core::source::Source;
+use core::{Package, SourceId, Workspace};
+use core::dependency::Kind;
+use core::manifest::ManifestMetadata;
+use ops;
+use sources::{RegistrySource};
+use util::config::{self, Config};
+use util::paths;
+use util::ToUrl;
+use util::errors::{CargoError, CargoResult, CargoResultExt};
+use util::important_paths::find_root_manifest_for_wd;
+
+pub struct RegistryConfig {
+    pub index: Option<String>,
+    pub token: Option<String>,
+}
+
+pub struct PublishOpts<'cfg> {
+    pub config: &'cfg Config,
+    pub token: Option<String>,
+    pub index: Option<String>,
+    pub verify: bool,
+    pub allow_dirty: bool,
+    pub jobs: Option<u32>,
+    pub target: Option<&'cfg str>,
+    pub dry_run: bool,
+}
+
+pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
+    let pkg = ws.current()?;
+
+    if !pkg.publish() {
+        bail!("some crates cannot be published.\n\
+               `{}` is marked as unpublishable", pkg.name());
+    }
+    if !pkg.manifest().patch().is_empty() {
+        bail!("published crates cannot contain [patch] sections");
+    }
+
+    let (mut registry, reg_id) = registry(opts.config,
+                                          opts.token.clone(),
+                                          opts.index.clone())?;
+    verify_dependencies(pkg, &reg_id)?;
+
+    // Prepare a tarball, with a non-suppressible warning if metadata
+    // is missing since this is being put online.
+    let tarball = ops::package(ws, &ops::PackageOpts {
+        config: opts.config,
+        verify: opts.verify,
+        list: false,
+        check_metadata: true,
+        allow_dirty: opts.allow_dirty,
+        target: opts.target,
+        jobs: opts.jobs,
+    })?.unwrap();
+
+    // Upload said tarball to the specified destination
+    opts.config.shell().status("Uploading", pkg.package_id().to_string())?;
+    transmit(opts.config, pkg, tarball.file(), &mut registry, opts.dry_run)?;
+
+    Ok(())
+}
+
+fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
+                       -> CargoResult<()> {
+    for dep in pkg.dependencies().iter() {
+        if dep.source_id().is_path() {
+            if !dep.specified_req() {
+                bail!("all path dependencies must have a version specified \
+                       when publishing.\ndependency `{}` does not specify \
+                       a version", dep.name())
+            }
+        } else if dep.source_id() != registry_src {
+            bail!("crates cannot be published to crates.io with dependencies sourced from \
+                   a repository\neither publish `{}` as its own crate on crates.io and \
+                   specify a crates.io version as a dependency or pull it into this \
+                   repository and specify it with a path and version\n(crate `{}` has \
+                   repository path `{}`)", dep.name(), dep.name(), dep.source_id());
+        }
+    }
+    Ok(())
+}
+
+fn transmit(config: &Config,
+            pkg: &Package,
+            tarball: &File,
+            registry: &mut Registry,
+            dry_run: bool) -> CargoResult<()> {
+    let deps = pkg.dependencies().iter().map(|dep| {
+        NewCrateDependency {
+            optional: dep.is_optional(),
+            default_features: dep.uses_default_features(),
+            name: dep.name().to_string(),
+            features: dep.features().to_vec(),
+            version_req: dep.version_req().to_string(),
+            target: dep.platform().map(|s| s.to_string()),
+            kind: match dep.kind() {
+                Kind::Normal => "normal",
+                Kind::Build => "build",
+                Kind::Development => "dev",
+            }.to_string(),
+        }
+    }).collect::<Vec<NewCrateDependency>>();
+    let manifest = pkg.manifest();
+    let ManifestMetadata {
+        ref authors, ref
description, ref homepage, ref documentation, + ref keywords, ref readme, ref repository, ref license, ref license_file, + ref categories, ref badges, + } = *manifest.metadata(); + let readme = match *readme { + Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?), + None => None, + }; + if let Some(ref file) = *license_file { + if fs::metadata(&pkg.root().join(file)).is_err() { + bail!("the license file `{}` does not exist", file) + } + } + + // Do not upload if performing a dry run + if dry_run { + config.shell().warn("aborting upload due to dry run")?; + return Ok(()); + } + + let publish = registry.publish(&NewCrate { + name: pkg.name().to_string(), + vers: pkg.version().to_string(), + deps: deps, + features: pkg.summary().features().clone(), + authors: authors.clone(), + description: description.clone(), + homepage: homepage.clone(), + documentation: documentation.clone(), + keywords: keywords.clone(), + categories: categories.clone(), + readme: readme, + repository: repository.clone(), + license: license.clone(), + license_file: license_file.clone(), + badges: badges.clone(), + }, tarball); + + match publish { + Ok(warnings) => { + if !warnings.invalid_categories.is_empty() { + let msg = format!("\ + the following are not valid category slugs and were \ + ignored: {}. Please see https://crates.io/category_slugs \ + for the list of all category slugs. \ + ", warnings.invalid_categories.join(", ")); + config.shell().warn(&msg)?; + } + + if !warnings.invalid_badges.is_empty() { + let msg = format!("\ + the following are not valid badges and were ignored: {}. \ + Either the badge type specified is unknown or a required \ + attribute is missing. Please see \ + http://doc.crates.io/manifest.html#package-metadata \ + for valid badge types and their required attributes.", + warnings.invalid_badges.join(", ")); + config.shell().warn(&msg)?; + } + + Ok(()) + }, + Err(e) => Err(e.into()), + } +} + +pub fn registry_configuration(config: &Config) -> CargoResult { + let index = config.get_string("registry.index")?.map(|p| p.val); + let token = config.get_string("registry.token")?.map(|p| p.val); + Ok(RegistryConfig { index: index, token: token }) +} + +pub fn registry(config: &Config, + token: Option, + index: Option) -> CargoResult<(Registry, SourceId)> { + // Parse all configuration options + let RegistryConfig { + token: token_config, + index: _index_config, + } = registry_configuration(config)?; + let token = token.or(token_config); + let sid = match index { + Some(index) => SourceId::for_registry(&index.to_url()?)?, + None => SourceId::crates_io(config)?, + }; + let api_host = { + let mut src = RegistrySource::remote(&sid, config); + src.update().chain_err(|| { + format!("failed to update {}", sid) + })?; + (src.config()?).unwrap().api + }; + let handle = http_handle(config)?; + Ok((Registry::new_handle(api_host, token, handle), sid)) +} + +/// Create a new HTTP handle with appropriate global configuration for cargo. +pub fn http_handle(config: &Config) -> CargoResult { + if !config.network_allowed() { + bail!("attempting to make an HTTP request, but --frozen was \ + specified") + } + + // The timeout option for libcurl by default times out the entire transfer, + // but we probably don't want this. Instead we only set timeouts for the + // connect phase as well as a "low speed" timeout so if we don't receive + // many bytes in a large-ish period of time then we time out. 
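+    //
+    // (With the values below, a transfer that delivers fewer than 10 bytes/s
+    // for 30 consecutive seconds is aborted, while a slow-but-steady download
+    // may run for longer than 30 seconds in total.)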
+ let mut handle = Easy::new(); + handle.connect_timeout(Duration::new(30, 0))?; + handle.low_speed_limit(10 /* bytes per second */)?; + handle.low_speed_time(Duration::new(30, 0))?; + handle.useragent(&version().to_string())?; + if let Some(proxy) = http_proxy(config)? { + handle.proxy(&proxy)?; + } + if let Some(cainfo) = config.get_path("http.cainfo")? { + handle.cainfo(&cainfo.val)?; + } + if let Some(check) = config.get_bool("http.check-revoke")? { + handle.ssl_options(SslOpt::new().no_revoke(!check.val))?; + } + if let Some(timeout) = http_timeout(config)? { + handle.connect_timeout(Duration::new(timeout as u64, 0))?; + handle.low_speed_time(Duration::new(timeout as u64, 0))?; + } + Ok(handle) +} + +/// Find an explicit HTTP proxy if one is available. +/// +/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified +/// via environment variables are picked up by libcurl. +fn http_proxy(config: &Config) -> CargoResult> { + if let Some(s) = config.get_string("http.proxy")? { + return Ok(Some(s.val)) + } + if let Ok(cfg) = git2::Config::open_default() { + if let Ok(s) = cfg.get_str("http.proxy") { + return Ok(Some(s.to_string())) + } + } + Ok(None) +} + +/// Determine if an http proxy exists. +/// +/// Checks the following for existence, in order: +/// +/// * cargo's `http.proxy` +/// * git's `http.proxy` +/// * `http_proxy` env var +/// * `HTTP_PROXY` env var +/// * `https_proxy` env var +/// * `HTTPS_PROXY` env var +pub fn http_proxy_exists(config: &Config) -> CargoResult { + if http_proxy(config)?.is_some() { + Ok(true) + } else { + Ok(["http_proxy", "HTTP_PROXY", + "https_proxy", "HTTPS_PROXY"].iter().any(|v| env::var(v).is_ok())) + } +} + +pub fn http_timeout(config: &Config) -> CargoResult> { + if let Some(s) = config.get_i64("http.timeout")? { + return Ok(Some(s.val)) + } + Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) +} + +pub fn registry_login(config: &Config, token: String) -> CargoResult<()> { + let RegistryConfig { token: old_token, .. 
} = registry_configuration(config)?; + if let Some(old_token) = old_token { + if old_token == token { + return Ok(()); + } + } + + config::save_credentials(config, token) +} + +pub struct OwnersOptions { + pub krate: Option, + pub token: Option, + pub index: Option, + pub to_add: Option>, + pub to_remove: Option>, + pub list: bool, +} + +pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { + let name = match opts.krate { + Some(ref name) => name.clone(), + None => { + let manifest_path = find_root_manifest_for_wd(None, config.cwd())?; + let pkg = Package::for_path(&manifest_path, config)?; + pkg.name().to_string() + } + }; + + let (mut registry, _) = registry(config, opts.token.clone(), + opts.index.clone())?; + + if let Some(ref v) = opts.to_add { + let v = v.iter().map(|s| &s[..]).collect::>(); + let msg = registry.add_owners(&name, &v).map_err(|e| { + CargoError::from(format!("failed to invite owners to crate {}: {}", name, e)) + })?; + + config.shell().status("Owner", msg)?; + } + + if let Some(ref v) = opts.to_remove { + let v = v.iter().map(|s| &s[..]).collect::>(); + config.shell().status("Owner", format!("removing {:?} from crate {}", + v, name))?; + registry.remove_owners(&name, &v).map_err(|e| { + CargoError::from(format!("failed to remove owners from crate {}: {}", name, e)) + })?; + } + + if opts.list { + let owners = registry.list_owners(&name).map_err(|e| { + CargoError::from(format!("failed to list owners of crate {}: {}", name, e)) + })?; + for owner in owners.iter() { + print!("{}", owner.login); + match (owner.name.as_ref(), owner.email.as_ref()) { + (Some(name), Some(email)) => println!(" ({} <{}>)", name, email), + (Some(s), None) | + (None, Some(s)) => println!(" ({})", s), + (None, None) => println!(""), + } + } + } + + Ok(()) +} + +pub fn yank(config: &Config, + krate: Option, + version: Option, + token: Option, + index: Option, + undo: bool) -> CargoResult<()> { + let name = match krate { + Some(name) => name, + None => { + let manifest_path = find_root_manifest_for_wd(None, config.cwd())?; + let pkg = Package::for_path(&manifest_path, config)?; + pkg.name().to_string() + } + }; + let version = match version { + Some(v) => v, + None => bail!("a version must be specified to yank") + }; + + let (mut registry, _) = registry(config, token, index)?; + + if undo { + config.shell().status("Unyank", format!("{}:{}", name, version))?; + registry.unyank(&name, &version).map_err(|e| { + CargoError::from(format!("failed to undo a yank: {}", e)) + })?; + } else { + config.shell().status("Yank", format!("{}:{}", name, version))?; + registry.yank(&name, &version).map_err(|e| { + CargoError::from(format!("failed to yank: {}", e)) + })?; + } + + Ok(()) +} + +pub fn search(query: &str, + config: &Config, + index: Option, + limit: u8) -> CargoResult<()> { + fn truncate_with_ellipsis(s: &str, max_length: usize) -> String { + if s.len() < max_length { + s.to_string() + } else { + format!("{}…", &s[..max_length - 1]) + } + } + + let (mut registry, _) = registry(config, None, index)?; + let (crates, total_crates) = registry.search(query, limit).map_err(|e| { + CargoError::from(format!("failed to retrieve search results from the registry: {}", e)) + })?; + + let list_items = crates.iter() + .map(|krate| ( + format!("{} = \"{}\"", krate.name, krate.max_version), + krate.description.as_ref().map(|desc| + truncate_with_ellipsis(&desc.replace("\n", " "), 128)) + )) + .collect::>(); + let description_margin = list_items.iter() + .map(|&(ref left, _)| left.len() + 4) 
+ .max() + .unwrap_or(0); + + for (name, description) in list_items.into_iter() { + let line = match description { + Some(desc) => { + let space = repeat(' ').take(description_margin - name.len()) + .collect::(); + name + &space + "# " + &desc + } + None => name + }; + println!("{}", line); + } + + let search_max_limit = 100; + if total_crates > u32::from(limit) && limit < search_max_limit { + println!("... and {} crates more (use --limit N to see more)", + total_crates - u32::from(limit)); + } else if total_crates > u32::from(limit) && limit >= search_max_limit { + println!("... and {} crates more (go to http://crates.io/search?q={} to see more)", + total_crates - u32::from(limit), + percent_encode(query.as_bytes(), QUERY_ENCODE_SET)); + } + + Ok(()) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/ops/resolve.rs b/collector/compile-benchmarks/cargo/src/cargo/ops/resolve.rs new file mode 100644 index 000000000..f26eb8e97 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/ops/resolve.rs @@ -0,0 +1,311 @@ +use std::collections::HashSet; + +use core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace}; +use core::registry::PackageRegistry; +use core::resolver::{self, Resolve, Method}; +use sources::PathSource; +use ops; +use util::profile; +use util::errors::{CargoResult, CargoResultExt}; + +/// Resolve all dependencies for the workspace using the previous +/// lockfile as a guide if present. +/// +/// This function will also write the result of resolution as a new +/// lockfile. +pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> { + let mut registry = PackageRegistry::new(ws.config())?; + let resolve = resolve_with_registry(ws, &mut registry, true)?; + let packages = get_resolved_packages(&resolve, registry); + Ok((packages, resolve)) +} + +/// Resolves dependencies for some packages of the workspace, +/// taking into account `paths` overrides and activated features. +pub fn resolve_ws_precisely<'a>(ws: &Workspace<'a>, + source: Option>, + features: &[String], + all_features: bool, + no_default_features: bool, + specs: &[PackageIdSpec]) + -> CargoResult<(PackageSet<'a>, Resolve)> { + let features = features.iter() + .flat_map(|s| s.split_whitespace()) + .flat_map(|s| s.split(',')) + .filter(|s| !s.is_empty()) + .map(|s| s.to_string()) + .collect::>(); + + let mut registry = PackageRegistry::new(ws.config())?; + if let Some(source) = source { + registry.add_preloaded(source); + } + + let resolve = if ws.require_optional_deps() { + // First, resolve the root_package's *listed* dependencies, as well as + // downloading and updating all remotes and such. + let resolve = resolve_with_registry(ws, &mut registry, false)?; + + // Second, resolve with precisely what we're doing. Filter out + // transitive dependencies if necessary, specify features, handle + // overrides, etc. + let _p = profile::start("resolving w/ overrides..."); + + add_overrides(&mut registry, ws)?; + + for &(ref replace_spec, ref dep) in ws.root_replace() { + if !resolve.iter().any(|r| replace_spec.matches(r) && !dep.matches_id(r)) { + ws.config().shell().warn( + format!("package replacement is not used: {}", replace_spec) + )? + } + } + + Some(resolve) + } else { + None + }; + + let method = if all_features { + Method::Everything + } else { + Method::Required { + dev_deps: true, // TODO: remove this option? 
+ features: &features, + uses_default_features: !no_default_features, + } + }; + + let resolved_with_overrides = + ops::resolve_with_previous(&mut registry, ws, + method, resolve.as_ref(), None, + specs, true)?; + + let packages = get_resolved_packages(&resolved_with_overrides, registry); + + Ok((packages, resolved_with_overrides)) +} + +fn resolve_with_registry(ws: &Workspace, registry: &mut PackageRegistry, warn: bool) + -> CargoResult { + let prev = ops::load_pkg_lockfile(ws)?; + let resolve = resolve_with_previous(registry, ws, + Method::Everything, + prev.as_ref(), None, &[], warn)?; + + if !ws.is_ephemeral() { + ops::write_pkg_lockfile(ws, &resolve)?; + } + Ok(resolve) +} + + +/// Resolve all dependencies for a package using an optional previous instance +/// of resolve to guide the resolution process. +/// +/// This also takes an optional hash set, `to_avoid`, which is a list of package +/// ids that should be avoided when consulting the previous instance of resolve +/// (often used in pairings with updates). +/// +/// The previous resolve normally comes from a lockfile. This function does not +/// read or write lockfiles from the filesystem. +pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, + ws: &Workspace, + method: Method, + previous: Option<&'a Resolve>, + to_avoid: Option<&HashSet<&'a PackageId>>, + specs: &[PackageIdSpec], + warn: bool) + -> CargoResult { + // Here we place an artificial limitation that all non-registry sources + // cannot be locked at more than one revision. This means that if a git + // repository provides more than one package, they must all be updated in + // step when any of them are updated. + // + // TODO: This seems like a hokey reason to single out the registry as being + // different + let mut to_avoid_sources = HashSet::new(); + if let Some(to_avoid) = to_avoid { + to_avoid_sources.extend(to_avoid.iter() + .map(|p| p.source_id()) + .filter(|s| !s.is_registry())); + } + + let ref keep = |p: &&'a PackageId| { + !to_avoid_sources.contains(&p.source_id()) && match to_avoid { + Some(set) => !set.contains(p), + None => true, + } + }; + + // In the case where a previous instance of resolve is available, we + // want to lock as many packages as possible to the previous version + // without disturbing the graph structure. To this end we perform + // two actions here: + // + // 1. We inform the package registry of all locked packages. This + // involves informing it of both the locked package's id as well + // as the versions of all locked dependencies. The registry will + // then takes this information into account when it is queried. + // + // 2. The specified package's summary will have its dependencies + // modified to their precise variants. This will instruct the + // first step of the resolution process to not query for ranges + // but rather for precise dependency versions. + // + // This process must handle altered dependencies, however, as + // it's possible for a manifest to change over time to have + // dependencies added, removed, or modified to different version + // ranges. To deal with this, we only actually lock a dependency + // to the previously resolved version if the dependency listed + // still matches the locked version. 
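+    // Illustrative example (hypothetical packages): if the previous lock file
+    // pinned `foo 1.0.0` and `bar 2.0.0`, and `to_avoid` contains `bar 2.0.0`
+    // (say, the user ran `cargo update -p bar`), then the `keep` filter above
+    // retains only `foo 1.0.0`: `foo` stays locked at its old version while
+    // `bar` is re-resolved from scratch.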
+ if let Some(r) = previous { + trace!("previous: {:?}", r); + for node in r.iter().filter(keep) { + let deps = r.deps_not_replaced(node) + .filter(keep) + .cloned().collect(); + registry.register_lock(node.clone(), deps); + } + } + + for (url, patches) in ws.root_patch() { + let previous = match previous { + Some(r) => r, + None => { + registry.patch(url, patches)?; + continue + } + }; + let patches = patches.iter().map(|dep| { + let unused = previous.unused_patches(); + let candidates = previous.iter().chain(unused); + match candidates.filter(keep).find(|id| dep.matches_id(id)) { + Some(id) => { + let mut dep = dep.clone(); + dep.lock_to(id); + dep + } + None => dep.clone(), + } + }).collect::>(); + registry.patch(url, &patches)?; + } + + let mut summaries = Vec::new(); + for member in ws.members() { + registry.add_sources(&[member.package_id().source_id().clone()])?; + let method_to_resolve = match method { + // When everything for a workspace we want to be sure to resolve all + // members in the workspace, so propagate the `Method::Everything`. + Method::Everything => Method::Everything, + + // If we're not resolving everything though then we're constructing the + // exact crate graph we're going to build. Here we don't necessarily + // want to keep around all workspace crates as they may not all be + // built/tested. + // + // Additionally, the `method` specified represents command line + // flags, which really only matters for the current package + // (determined by the cwd). If other packages are specified (via + // `-p`) then the command line flags like features don't apply to + // them. + // + // As a result, if this `member` is the current member of the + // workspace, then we use `method` specified. Otherwise we use a + // base method with no features specified but using default features + // for any other packages specified with `-p`. + Method::Required { dev_deps, .. } => { + let base = Method::Required { + dev_deps: dev_deps, + features: &[], + uses_default_features: true, + }; + let member_id = member.package_id(); + match ws.current_opt() { + Some(current) if member_id == current.package_id() => method, + _ => { + if specs.iter().any(|spec| spec.matches(member_id)) { + base + } else { + continue + } + } + } + } + }; + + let summary = registry.lock(member.summary().clone()); + summaries.push((summary, method_to_resolve)); + } + + let root_replace = ws.root_replace(); + + let replace = match previous { + Some(r) => { + root_replace.iter().map(|&(ref spec, ref dep)| { + for (key, val) in r.replacements().iter() { + if spec.matches(key) && dep.matches_id(val) && keep(&val) { + let mut dep = dep.clone(); + dep.lock_to(val); + return (spec.clone(), dep) + } + } + (spec.clone(), dep.clone()) + }).collect::>() + } + None => root_replace.to_vec(), + }; + + let config = if warn { + Some(ws.config()) + } else { + None + }; + let mut resolved = resolver::resolve(&summaries, + &replace, + registry, + config)?; + resolved.register_used_patches(registry.patches()); + if let Some(previous) = previous { + resolved.merge_from(previous)?; + } + Ok(resolved) +} + +/// Read the `paths` configuration variable to discover all path overrides that +/// have been configured. +fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, + ws: &Workspace<'a>) -> CargoResult<()> { + let paths = match ws.config().get_list("paths")? 
{ + Some(list) => list, + None => return Ok(()) + }; + + let paths = paths.val.iter().map(|&(ref s, ref p)| { + // The path listed next to the string is the config file in which the + // key was located, so we want to pop off the `.cargo/config` component + // to get the directory containing the `.cargo` folder. + (p.parent().unwrap().parent().unwrap().join(s), p) + }); + + for (path, definition) in paths { + let id = SourceId::for_path(&path)?; + let mut source = PathSource::new_recursive(&path, &id, ws.config()); + source.update().chain_err(|| { + format!("failed to update path override `{}` \ + (defined in `{}`)", path.display(), + definition.display()) + })?; + registry.add_override(Box::new(source)); + } + Ok(()) +} + +fn get_resolved_packages<'a>(resolve: &Resolve, + registry: PackageRegistry<'a>) + -> PackageSet<'a> { + let ids: Vec = resolve.iter().cloned().collect(); + registry.get(&ids) +} + diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/config.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/config.rs new file mode 100644 index 000000000..5aa44110a --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/config.rs @@ -0,0 +1,226 @@ +//! Implementation of configuration for various sources +//! +//! This module will parse the various `source.*` TOML configuration keys into a +//! structure usable by Cargo itself. Currently this is primarily used to map +//! sources to one another via the `replace-with` key in `.cargo/config`. + +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use url::Url; + +use core::{Source, SourceId, GitReference}; +use sources::ReplacedSource; +use util::{Config, ToUrl}; +use util::config::ConfigValue; +use util::errors::{CargoError, CargoResult, CargoResultExt}; + +#[derive(Clone)] +pub struct SourceConfigMap<'cfg> { + cfgs: HashMap, + id2name: HashMap, + config: &'cfg Config, +} + +/// Configuration for a particular source, found in TOML looking like: +/// +/// ```toml +/// [source.crates-io] +/// registry = 'https://github.com/rust-lang/crates.io-index' +/// replace-with = 'foo' # optional +/// ``` +#[derive(Clone)] +struct SourceConfig { + // id this source corresponds to, inferred from the various defined keys in + // the configuration + id: SourceId, + + // Name of the source that this source should be replaced with. This field + // is a tuple of (name, path) where path is where this configuration key was + // defined (the literal `.cargo/config` file). + replace_with: Option<(String, PathBuf)>, +} + +impl<'cfg> SourceConfigMap<'cfg> { + pub fn new(config: &'cfg Config) -> CargoResult> { + let mut base = SourceConfigMap::empty(config)?; + if let Some(table) = config.get_table("source")? 
{ + for (key, value) in table.val.iter() { + base.add_config(key, value)?; + } + } + Ok(base) + } + + pub fn empty(config: &'cfg Config) -> CargoResult> { + let mut base = SourceConfigMap { + cfgs: HashMap::new(), + id2name: HashMap::new(), + config: config, + }; + base.add("crates-io", SourceConfig { + id: SourceId::crates_io(config)?, + replace_with: None, + }); + Ok(base) + } + + pub fn config(&self) -> &'cfg Config { + self.config + } + + pub fn load(&self, id: &SourceId) -> CargoResult> { + debug!("loading: {}", id); + let mut name = match self.id2name.get(id) { + Some(name) => name, + None => return Ok(id.load(self.config)?), + }; + let mut path = Path::new("/"); + let orig_name = name; + let new_id; + loop { + let cfg = match self.cfgs.get(name) { + Some(cfg) => cfg, + None => bail!("could not find a configured source with the \ + name `{}` when attempting to lookup `{}` \ + (configuration in `{}`)", + name, orig_name, path.display()), + }; + match cfg.replace_with { + Some((ref s, ref p)) => { + name = s; + path = p; + } + None if *id == cfg.id => return Ok(id.load(self.config)?), + None => { + new_id = cfg.id.with_precise(id.precise() + .map(|s| s.to_string())); + break + } + } + debug!("following pointer to {}", name); + if name == orig_name { + bail!("detected a cycle of `replace-with` sources, the source \ + `{}` is eventually replaced with itself \ + (configuration in `{}`)", name, path.display()) + } + } + let new_src = new_id.load(self.config)?; + let old_src = id.load(self.config)?; + if !new_src.supports_checksums() && old_src.supports_checksums() { + bail!("\ +cannot replace `{orig}` with `{name}`, the source `{orig}` supports \ +checksums, but `{name}` does not + +a lock file compatible with `{orig}` cannot be generated in this situation +", orig = orig_name, name = name); + } + + if old_src.requires_precise() && id.precise().is_none() { + bail!("\ +the source {orig} requires a lock file to be present first before it can be +used against vendored source code + +remove the source replacement configuration, generate a lock file, and then +restore the source replacement configuration to continue the build +", orig = orig_name); + } + + Ok(Box::new(ReplacedSource::new(id, &new_id, new_src))) + } + + fn add(&mut self, name: &str, cfg: SourceConfig) { + self.id2name.insert(cfg.id.clone(), name.to_string()); + self.cfgs.insert(name.to_string(), cfg); + } + + fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> { + let (table, _path) = cfg.table(&format!("source.{}", name))?; + let mut srcs = Vec::new(); + if let Some(val) = table.get("registry") { + let url = url(val, &format!("source.{}.registry", name))?; + srcs.push(SourceId::for_registry(&url)?); + } + if let Some(val) = table.get("local-registry") { + let (s, path) = val.string(&format!("source.{}.local-registry", + name))?; + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(SourceId::for_local_registry(&path)?); + } + if let Some(val) = table.get("directory") { + let (s, path) = val.string(&format!("source.{}.directory", + name))?; + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(SourceId::for_directory(&path)?); + } + if let Some(val) = table.get("git") { + let url = url(val, &format!("source.{}.git", name))?; + let try = |s: &str| { + let val = match table.get(s) { + Some(s) => s, + None => return Ok(None), + }; + let key = format!("source.{}.{}", name, s); + val.string(&key).map(Some) + }; + let reference = match 
try("branch")? { + Some(b) => GitReference::Branch(b.0.to_string()), + None => { + match try("tag")? { + Some(b) => GitReference::Tag(b.0.to_string()), + None => { + match try("rev")? { + Some(b) => GitReference::Rev(b.0.to_string()), + None => GitReference::Branch("master".to_string()), + } + } + } + } + }; + srcs.push(SourceId::for_git(&url, reference)?); + } + if name == "crates-io" && srcs.is_empty() { + srcs.push(SourceId::crates_io(self.config)?); + } + + let mut srcs = srcs.into_iter(); + let src = srcs.next().ok_or_else(|| { + CargoError::from(format!("no source URL specified for `source.{}`, need \ + either `registry` or `local-registry` defined", + name)) + })?; + if srcs.next().is_some() { + return Err(format!("more than one source URL specified for \ + `source.{}`", name).into()) + } + + let mut replace_with = None; + if let Some(val) = table.get("replace-with") { + let (s, path) = val.string(&format!("source.{}.replace-with", + name))?; + replace_with = Some((s.to_string(), path.to_path_buf())); + } + + self.add(name, SourceConfig { + id: src, + replace_with: replace_with, + }); + + return Ok(()); + + fn url(cfg: &ConfigValue, key: &str) -> CargoResult { + let (url, path) = cfg.string(key)?; + url.to_url().chain_err(|| { + format!("configuration key `{}` specified an invalid \ + URL (in {})", key, path.display()) + + }) + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/directory.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/directory.rs new file mode 100644 index 000000000..902d64d22 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/directory.rs @@ -0,0 +1,204 @@ +use std::collections::HashMap; +use std::fmt::{self, Debug, Formatter}; +use std::fs::File; +use std::io::Read; +use std::path::{Path, PathBuf}; + +use hex::ToHex; + +use serde_json; + +use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry}; +use sources::PathSource; +use util::{Config, Sha256}; +use util::errors::{CargoResult, CargoResultExt}; +use util::paths; + +pub struct DirectorySource<'cfg> { + source_id: SourceId, + root: PathBuf, + packages: HashMap, + config: &'cfg Config, +} + +#[derive(Deserialize)] +struct Checksum { + package: Option, + files: HashMap, +} + +impl<'cfg> DirectorySource<'cfg> { + pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) + -> DirectorySource<'cfg> { + DirectorySource { + source_id: id.clone(), + root: path.to_path_buf(), + config: config, + packages: HashMap::new(), + } + } +} + +impl<'cfg> Debug for DirectorySource<'cfg> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "DirectorySource {{ root: {:?} }}", self.root) + } +} + +impl<'cfg> Registry for DirectorySource<'cfg> { + fn query(&mut self, + dep: &Dependency, + f: &mut FnMut(Summary)) -> CargoResult<()> { + let packages = self.packages.values().map(|p| &p.0); + let matches = packages.filter(|pkg| dep.matches(pkg.summary())); + for summary in matches.map(|pkg| pkg.summary().clone()) { + f(summary); + } + Ok(()) + } + + fn supports_checksums(&self) -> bool { + true + } + + fn requires_precise(&self) -> bool { + true + } +} + +impl<'cfg> Source for DirectorySource<'cfg> { + fn source_id(&self) -> &SourceId { + &self.source_id + } + + fn update(&mut self) -> CargoResult<()> { + self.packages.clear(); + let entries = self.root.read_dir().chain_err(|| { + format!("failed to read root of directory source: {}", + self.root.display()) + })?; + + for entry in entries { + let entry = entry?; + let path = entry.path(); + + 
// Ignore hidden/dot directories as they typically don't contain
+            // crates and otherwise may conflict with a VCS
+            // (rust-lang/cargo#3414).
+            if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
+                if s.starts_with('.') {
+                    continue
+                }
+            }
+
+            // Vendor directories are often checked into a VCS, but throughout
+            // the lifetime of a vendor dir crates are often added and deleted.
+            // Some VCS implementations don't always fully delete the directory
+            // when a dir is removed from a different checkout. Sometimes a
+            // mostly-empty dir is left behind.
+            //
+            // To help Cargo work by default in more cases we try to
+            // handle this case by default. If the directory looks like it only
+            // has dotfiles in it (or no files at all) then we skip it.
+            //
+            // In general we don't want to skip completely malformed
+            // directories to help with debugging, so we don't just ignore
+            // errors in `update` below.
+            let mut only_dotfile = true;
+            for entry in path.read_dir()?.filter_map(|e| e.ok()) {
+                if let Some(s) = entry.file_name().to_str() {
+                    if s.starts_with('.') {
+                        continue
+                    }
+                }
+                only_dotfile = false;
+            }
+            if only_dotfile {
+                continue
+            }
+
+            let mut src = PathSource::new(&path, &self.source_id, self.config);
+            src.update()?;
+            let pkg = src.root_package()?;
+
+            let cksum_file = path.join(".cargo-checksum.json");
+            let cksum = paths::read(&cksum_file).chain_err(|| {
+                format!("failed to load checksum `.cargo-checksum.json` \
+                         of {} v{}",
+                        pkg.package_id().name(),
+                        pkg.package_id().version())
+            })?;
+            let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
+                format!("failed to decode `.cargo-checksum.json` of \
+                         {} v{}",
+                        pkg.package_id().name(),
+                        pkg.package_id().version())
+            })?;
+
+            let mut manifest = pkg.manifest().clone();
+            let mut summary = manifest.summary().clone();
+            if let Some(ref package) = cksum.package {
+                summary = summary.set_checksum(package.clone());
+            }
+            manifest.set_summary(summary);
+            let pkg = Package::new(manifest, pkg.manifest_path());
+            self.packages.insert(pkg.package_id().clone(), (pkg, cksum));
+        }
+
+        Ok(())
+    }
+
+    fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
+        self.packages.get(id).map(|p| &p.0).cloned().ok_or_else(|| {
+            format!("failed to find package with id: {}", id).into()
+        })
+    }
+
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        Ok(pkg.package_id().version().to_string())
+    }
+
+    fn verify(&self, id: &PackageId) -> CargoResult<()> {
+        let (pkg, cksum) = match self.packages.get(id) {
+            Some(&(ref pkg, ref cksum)) => (pkg, cksum),
+            None => bail!("failed to find entry for `{}` in directory source",
+                          id),
+        };
+
+        let mut buf = [0; 16 * 1024];
+        for (file, cksum) in cksum.files.iter() {
+            let mut h = Sha256::new();
+            let file = pkg.root().join(file);
+
+            (|| -> CargoResult<()> {
+                let mut f = File::open(&file)?;
+                loop {
+                    match f.read(&mut buf)?
{ + 0 => return Ok(()), + n => h.update(&buf[..n]), + } + } + })().chain_err(|| { + format!("failed to calculate checksum of: {}", + file.display()) + })?; + + let actual = h.finish().to_hex(); + if &*actual != cksum { + bail!("\ + the listed checksum of `{}` has changed:\n\ + expected: {}\n\ + actual: {}\n\ + \n\ + directory sources are not intended to be edited, if \ + modifications are required then it is recommended \ + that [replace] is used with a forked copy of the \ + source\ + ", file.display(), cksum, actual); + } + } + + Ok(()) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/git/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/git/mod.rs new file mode 100644 index 000000000..0ef4db4d6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/git/mod.rs @@ -0,0 +1,4 @@ +pub use self::utils::{GitRemote, GitDatabase, GitCheckout, GitRevision, fetch}; +pub use self::source::{GitSource, canonicalize_url}; +mod utils; +mod source; diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/git/source.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/git/source.rs new file mode 100644 index 000000000..13e266b04 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/git/source.rs @@ -0,0 +1,268 @@ +use std::fmt::{self, Debug, Formatter}; + +use url::Url; + +use core::source::{Source, SourceId}; +use core::GitReference; +use core::{Package, PackageId, Summary, Registry, Dependency}; +use util::Config; +use util::errors::{CargoError, CargoResult}; +use util::hex::short_hash; +use sources::PathSource; +use sources::git::utils::{GitRemote, GitRevision}; + +/* TODO: Refactor GitSource to delegate to a PathSource + */ +pub struct GitSource<'cfg> { + remote: GitRemote, + reference: GitReference, + source_id: SourceId, + path_source: Option>, + rev: Option, + ident: String, + config: &'cfg Config, +} + +impl<'cfg> GitSource<'cfg> { + pub fn new(source_id: &SourceId, + config: &'cfg Config) -> CargoResult> { + assert!(source_id.is_git(), "id is not git, id={}", source_id); + + let remote = GitRemote::new(source_id.url()); + let ident = ident(source_id.url())?; + + let reference = match source_id.precise() { + Some(s) => GitReference::Rev(s.to_string()), + None => source_id.git_reference().unwrap().clone(), + }; + + let source = GitSource { + remote: remote, + reference: reference, + source_id: source_id.clone(), + path_source: None, + rev: None, + ident: ident, + config: config, + }; + + Ok(source) + } + + pub fn url(&self) -> &Url { self.remote.url() } + + pub fn read_packages(&mut self) -> CargoResult> { + if self.path_source.is_none() { + self.update()?; + } + self.path_source.as_mut().unwrap().read_packages() + } +} + +fn ident(url: &Url) -> CargoResult { + let url = canonicalize_url(url)?; + let ident = url.path_segments().and_then(|mut s| s.next_back()).unwrap_or(""); + + let ident = if ident == "" { + "_empty" + } else { + ident + }; + + Ok(format!("{}-{}", ident, short_hash(&url))) +} + +// Some hacks and heuristics for making equivalent URLs hash the same +pub fn canonicalize_url(url: &Url) -> CargoResult { + let mut url = url.clone(); + + // cannot-be-a-base-urls are not supported + // eg. 
github.com:rust-lang-nursery/rustfmt.git + if url.cannot_be_a_base() { + return Err(format!("invalid url `{}`: cannot-be-a-base-URLs are not supported", url).into()); + } + + // Strip a trailing slash + if url.path().ends_with('/') { + url.path_segments_mut().unwrap().pop_if_empty(); + } + + // HACKHACK: For github URL's specifically just lowercase + // everything. GitHub treats both the same, but they hash + // differently, and we're gonna be hashing them. This wants a more + // general solution, and also we're almost certainly not using the + // same case conversion rules that GitHub does. (#84) + if url.host_str() == Some("github.com") { + url.set_scheme("https").unwrap(); + let path = url.path().to_lowercase(); + url.set_path(&path); + } + + // Repos generally can be accessed with or w/o '.git' + let needs_chopping = url.path().ends_with(".git"); + if needs_chopping { + let last = { + let last = url.path_segments().unwrap().next_back().unwrap(); + last[..last.len() - 4].to_owned() + }; + url.path_segments_mut().unwrap().pop().push(&last); + } + + Ok(url) +} + +impl<'cfg> Debug for GitSource<'cfg> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "git repo at {}", self.remote.url())?; + + match self.reference.pretty_ref() { + Some(s) => write!(f, " ({})", s), + None => Ok(()) + } + } +} + +impl<'cfg> Registry for GitSource<'cfg> { + fn query(&mut self, + dep: &Dependency, + f: &mut FnMut(Summary)) -> CargoResult<()> { + let src = self.path_source.as_mut() + .expect("BUG: update() must be called before query()"); + src.query(dep, f) + } + + fn supports_checksums(&self) -> bool { + false + } + + fn requires_precise(&self) -> bool { + true + } +} + +impl<'cfg> Source for GitSource<'cfg> { + fn source_id(&self) -> &SourceId { + &self.source_id + } + + fn update(&mut self) -> CargoResult<()> { + let lock = self.config.git_path() + .open_rw(".cargo-lock-git", self.config, "the git checkouts")?; + + let db_path = lock.parent().join("db").join(&self.ident); + + // Resolve our reference to an actual revision, and check if the + // database already has that revision. If it does, we just load a + // database pinned at that revision, and if we don't we issue an update + // to try to find the revision. + let actual_rev = self.remote.rev_for(&db_path, &self.reference); + let should_update = actual_rev.is_err() || + self.source_id.precise().is_none(); + + let (repo, actual_rev) = if should_update { + self.config.shell().status("Updating", + format!("git repository `{}`", self.remote.url()))?; + + trace!("updating git source `{:?}`", self.remote); + + let repo = self.remote.checkout(&db_path, self.config)?; + let rev = repo.rev_for(&self.reference).map_err(CargoError::into_internal)?; + (repo, rev) + } else { + (self.remote.db_at(&db_path)?, actual_rev.unwrap()) + }; + + // Don’t use the full hash, + // to contribute less to reaching the path length limit on Windows: + // https://github.com/servo/servo/pull/14397 + let short_id = repo.to_short_id(actual_rev.clone()).unwrap(); + + let checkout_path = lock.parent().join("checkouts") + .join(&self.ident).join(short_id.as_str()); + + // Copy the database to the checkout location. After this we could drop + // the lock on the database as we no longer needed it, but we leave it + // in scope so the destructors here won't tamper with too much. + // Checkout is immutable, so we don't need to protect it with a lock once + // it is created. 
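+        // Assumed on-disk layout for the pieces above (illustrative only,
+        // rooted at Cargo's git cache directory):
+        //
+        //     git/db/<ident>/                    bare "database" clone
+        //     git/checkouts/<ident>/<short-id>/  per-revision working tree
+        //
+        // `copy_to` below materializes the locked revision out of the shared
+        // database into its own checkout directory.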
+ repo.copy_to(actual_rev.clone(), &checkout_path, self.config)?; + + let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); + let path_source = PathSource::new_recursive(&checkout_path, + &source_id, + self.config); + + self.path_source = Some(path_source); + self.rev = Some(actual_rev); + self.path_source.as_mut().unwrap().update() + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + trace!("getting packages for package id `{}` from `{:?}`", id, + self.remote); + self.path_source.as_mut() + .expect("BUG: update() must be called before get()") + .download(id) + } + + fn fingerprint(&self, _pkg: &Package) -> CargoResult { + Ok(self.rev.as_ref().unwrap().to_string()) + } +} + +#[cfg(test)] +mod test { + use url::Url; + use super::ident; + use util::ToUrl; + + #[test] + pub fn test_url_to_path_ident_with_path() { + let ident = ident(&url("https://github.com/carlhuda/cargo")).unwrap(); + assert!(ident.starts_with("cargo-")); + } + + #[test] + pub fn test_url_to_path_ident_without_path() { + let ident = ident(&url("https://github.com")).unwrap(); + assert!(ident.starts_with("_empty-")); + } + + #[test] + fn test_canonicalize_idents_by_stripping_trailing_url_slash() { + let ident1 = ident(&url("https://github.com/PistonDevelopers/piston/")).unwrap(); + let ident2 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap(); + assert_eq!(ident1, ident2); + } + + #[test] + fn test_canonicalize_idents_by_lowercasing_github_urls() { + let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap(); + let ident2 = ident(&url("https://github.com/pistondevelopers/piston")).unwrap(); + assert_eq!(ident1, ident2); + } + + #[test] + fn test_canonicalize_idents_by_stripping_dot_git() { + let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap(); + let ident2 = ident(&url("https://github.com/PistonDevelopers/piston.git")).unwrap(); + assert_eq!(ident1, ident2); + } + + #[test] + fn test_canonicalize_idents_different_protocols() { + let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap(); + let ident2 = ident(&url("git://github.com/PistonDevelopers/piston")).unwrap(); + assert_eq!(ident1, ident2); + } + + #[test] + fn test_canonicalize_cannot_be_a_base_urls() { + assert!(ident(&url("github.com:PistonDevelopers/piston")).is_err()); + assert!(ident(&url("google.com:PistonDevelopers/piston")).is_err()); + } + + fn url(s: &str) -> Url { + s.to_url().unwrap() + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/git/utils.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/git/utils.rs new file mode 100644 index 000000000..505fd24a8 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/git/utils.rs @@ -0,0 +1,733 @@ +use std::env; +use std::fmt; +use std::fs::{self, File}; +use std::mem; +use std::path::{Path, PathBuf}; +use std::process::Command; + +use curl::easy::{Easy, List}; +use git2::{self, ObjectType}; +use serde::ser::{self, Serialize}; +use url::Url; + +use core::GitReference; +use util::{ToUrl, internal, Config, network}; +use util::errors::{CargoResult, CargoResultExt, CargoError}; + +#[derive(PartialEq, Clone, Debug)] +pub struct GitRevision(git2::Oid); + +impl ser::Serialize for GitRevision { + fn serialize(&self, s: S) -> Result { + serialize_str(self, s) + } +} + +fn 
serialize_str(t: &T, s: S) -> Result + where T: fmt::Display, + S: ser::Serializer, +{ + t.to_string().serialize(s) +} + +impl fmt::Display for GitRevision { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } +} + +pub struct GitShortID(git2::Buf); + +impl GitShortID { + pub fn as_str(&self) -> &str { + self.0.as_str().unwrap() + } +} + +/// `GitRemote` represents a remote repository. It gets cloned into a local +/// `GitDatabase`. +#[derive(PartialEq, Clone, Debug, Serialize)] +pub struct GitRemote { + #[serde(serialize_with = "serialize_str")] + url: Url, +} + +/// `GitDatabase` is a local clone of a remote repository's database. Multiple +/// `GitCheckouts` can be cloned from this `GitDatabase`. +#[derive(Serialize)] +pub struct GitDatabase { + remote: GitRemote, + path: PathBuf, + #[serde(skip_serializing)] + repo: git2::Repository, +} + +/// `GitCheckout` is a local checkout of a particular revision. Calling +/// `clone_into` with a reference will resolve the reference into a revision, +/// and return a `CargoError` if no revision for that reference was found. +#[derive(Serialize)] +pub struct GitCheckout<'a> { + database: &'a GitDatabase, + location: PathBuf, + revision: GitRevision, + #[serde(skip_serializing)] + repo: git2::Repository, +} + +// Implementations + +impl GitRemote { + pub fn new(url: &Url) -> GitRemote { + GitRemote { url: url.clone() } + } + + pub fn url(&self) -> &Url { + &self.url + } + + pub fn rev_for(&self, path: &Path, reference: &GitReference) + -> CargoResult { + let db = self.db_at(path)?; + db.rev_for(reference) + } + + pub fn checkout(&self, into: &Path, cargo_config: &Config) -> CargoResult { + let repo = match git2::Repository::open(into) { + Ok(mut repo) => { + self.fetch_into(&mut repo, cargo_config).chain_err(|| { + format!("failed to fetch into {}", into.display()) + })?; + repo + } + Err(..) => { + self.clone_into(into, cargo_config).chain_err(|| { + format!("failed to clone into: {}", into.display()) + })? + } + }; + + Ok(GitDatabase { + remote: self.clone(), + path: into.to_path_buf(), + repo: repo, + }) + } + + pub fn db_at(&self, db_path: &Path) -> CargoResult { + let repo = git2::Repository::open(db_path)?; + Ok(GitDatabase { + remote: self.clone(), + path: db_path.to_path_buf(), + repo: repo, + }) + } + + fn fetch_into(&self, dst: &mut git2::Repository, cargo_config: &Config) -> CargoResult<()> { + // Create a local anonymous remote in the repository to fetch the url + let refspec = "refs/heads/*:refs/heads/*"; + fetch(dst, &self.url, refspec, cargo_config) + } + + fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult { + if fs::metadata(&dst).is_ok() { + fs::remove_dir_all(dst)?; + } + fs::create_dir_all(dst)?; + let mut repo = git2::Repository::init_bare(dst)?; + fetch(&mut repo, &self.url, "refs/heads/*:refs/heads/*", cargo_config)?; + Ok(repo) + } +} + +impl GitDatabase { + fn path(&self) -> &Path { + &self.path + } + + pub fn copy_to(&self, rev: GitRevision, dest: &Path, cargo_config: &Config) + -> CargoResult { + let checkout = match git2::Repository::open(dest) { + Ok(repo) => { + let mut checkout = GitCheckout::new(dest, self, rev, repo); + if !checkout.is_fresh() { + checkout.fetch(cargo_config)?; + checkout.reset()?; + assert!(checkout.is_fresh()); + } + checkout + } + Err(..) 
=> GitCheckout::clone_into(dest, self, rev)?, + }; + checkout.update_submodules(cargo_config)?; + Ok(checkout) + } + + pub fn rev_for(&self, reference: &GitReference) -> CargoResult { + let id = match *reference { + GitReference::Tag(ref s) => { + (|| -> CargoResult { + let refname = format!("refs/tags/{}", s); + let id = self.repo.refname_to_id(&refname)?; + let obj = self.repo.find_object(id, None)?; + let obj = obj.peel(ObjectType::Commit)?; + Ok(obj.id()) + })().chain_err(|| { + format!("failed to find tag `{}`", s) + })? + } + GitReference::Branch(ref s) => { + (|| { + let b = self.repo.find_branch(s, git2::BranchType::Local)?; + b.get().target().ok_or_else(|| { + CargoError::from(format!("branch `{}` did not have a target", s)) + }) + })().chain_err(|| { + format!("failed to find branch `{}`", s) + })? + } + GitReference::Rev(ref s) => { + let obj = self.repo.revparse_single(s)?; + match obj.as_tag() { + Some(tag) => tag.target_id(), + None => obj.id(), + } + } + }; + Ok(GitRevision(id)) + } + + pub fn to_short_id(&self, revision: GitRevision) -> CargoResult { + let obj = self.repo.find_object(revision.0, None)?; + Ok(GitShortID(obj.short_id()?)) + } + + pub fn has_ref(&self, reference: &str) -> CargoResult<()> { + self.repo.revparse_single(reference)?; + Ok(()) + } +} + +impl<'a> GitCheckout<'a> { + fn new(path: &Path, database: &'a GitDatabase, revision: GitRevision, + repo: git2::Repository) + -> GitCheckout<'a> + { + GitCheckout { + location: path.to_path_buf(), + database: database, + revision: revision, + repo: repo, + } + } + + fn clone_into(into: &Path, database: &'a GitDatabase, + revision: GitRevision) + -> CargoResult> + { + let repo = GitCheckout::clone_repo(database.path(), into)?; + let checkout = GitCheckout::new(into, database, revision, repo); + checkout.reset()?; + Ok(checkout) + } + + fn clone_repo(source: &Path, into: &Path) -> CargoResult { + let dirname = into.parent().unwrap(); + + fs::create_dir_all(&dirname).chain_err(|| { + format!("Couldn't mkdir {}", dirname.display()) + })?; + + if fs::metadata(&into).is_ok() { + fs::remove_dir_all(into).chain_err(|| { + format!("Couldn't rmdir {}", into.display()) + })?; + } + + let url = source.to_url()?; + let url = url.to_string(); + let repo = git2::Repository::clone(&url, into) + .chain_err(|| { + internal(format!("failed to clone {} into {}", source.display(), + into.display())) + })?; + Ok(repo) + } + + fn is_fresh(&self) -> bool { + match self.repo.revparse_single("HEAD") { + Ok(ref head) if head.id() == self.revision.0 => { + // See comments in reset() for why we check this + fs::metadata(self.location.join(".cargo-ok")).is_ok() + } + _ => false, + } + } + + fn fetch(&mut self, cargo_config: &Config) -> CargoResult<()> { + info!("fetch {}", self.repo.path().display()); + let url = self.database.path.to_url()?; + let refspec = "refs/heads/*:refs/heads/*"; + fetch(&mut self.repo, &url, refspec, cargo_config)?; + Ok(()) + } + + fn reset(&self) -> CargoResult<()> { + // If we're interrupted while performing this reset (e.g. we die because + // of a signal) Cargo needs to be sure to try to check out this repo + // again on the next go-round. + // + // To enable this we have a dummy file in our checkout, .cargo-ok, which + // if present means that the repo has been successfully reset and is + // ready to go. Hence if we start to do a reset, we make sure this file + // *doesn't* exist, and then once we're done we create the file. 
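+        // Sketch of the sentinel protocol implemented below:
+        //   1. remove `.cargo-ok`       -> checkout is marked in-progress
+        //   2. hard-reset to `revision` -> working tree matches the lock
+        //   3. create `.cargo-ok`       -> `is_fresh()` reports true again
+        // Dying between steps 1 and 3 leaves the file absent, forcing a fresh
+        // reset on the next run.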
+ let ok_file = self.location.join(".cargo-ok"); + let _ = fs::remove_file(&ok_file); + info!("reset {} to {}", self.repo.path().display(), self.revision); + let object = self.repo.find_object(self.revision.0, None)?; + self.repo.reset(&object, git2::ResetType::Hard, None)?; + File::create(ok_file)?; + Ok(()) + } + + fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> { + return update_submodules(&self.repo, cargo_config); + + fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> { + info!("update submodules for: {:?}", repo.workdir().unwrap()); + + for mut child in repo.submodules()? { + update_submodule(repo, &mut child, cargo_config) + .map_err(CargoError::into_internal) + .chain_err(|| { + format!("failed to update submodule `{}`", + child.name().unwrap_or("")) + })?; + } + Ok(()) + } + + fn update_submodule(parent: &git2::Repository, + child: &mut git2::Submodule, + cargo_config: &Config) -> CargoResult<()> { + child.init(false)?; + let url = child.url().ok_or_else(|| { + internal("non-utf8 url for submodule") + })?; + + // A submodule which is listed in .gitmodules but not actually + // checked out will not have a head id, so we should ignore it. + let head = match child.head_id() { + Some(head) => head, + None => return Ok(()), + }; + + // If the submodule hasn't been checked out yet, we need to + // clone it. If it has been checked out and the head is the same + // as the submodule's head, then we can bail out and go to the + // next submodule. + let head_and_repo = child.open().and_then(|repo| { + let target = repo.head()?.target(); + Ok((target, repo)) + }); + let mut repo = match head_and_repo { + Ok((head, repo)) => { + if child.head_id() == head { + return Ok(()) + } + repo + } + Err(..) => { + let path = parent.workdir().unwrap().join(child.path()); + let _ = fs::remove_dir_all(&path); + git2::Repository::clone(url, &path)? + } + }; + + // Fetch data from origin and reset to the head commit + let refspec = "refs/heads/*:refs/heads/*"; + let url = url.to_url()?; + fetch(&mut repo, &url, refspec, cargo_config).chain_err(|| { + internal(format!("failed to fetch submodule `{}` from {}", + child.name().unwrap_or(""), url)) + })?; + + repo.find_object(head, None) + .and_then(|obj| { repo.reset(&obj, git2::ResetType::Hard, None)})?; + update_submodules(&repo, cargo_config) + } + } +} + +/// Prepare the authentication callbacks for cloning a git repository. +/// +/// The main purpose of this function is to construct the "authentication +/// callback" which is used to clone a repository. This callback will attempt to +/// find the right authentication on the system (without user input) and will +/// guide libgit2 in doing so. +/// +/// The callback is provided `allowed` types of credentials, and we try to do as +/// much as possible based on that: +/// +/// * Prioritize SSH keys from the local ssh agent as they're likely the most +/// reliable. The username here is prioritized from the credential +/// callback, then from whatever is configured in git itself, and finally +/// we fall back to the generic user of `git`. +/// +/// * If a username/password is allowed, then we fallback to git2-rs's +/// implementation of the credential helper. This is what is configured +/// with `credential.helper` in git, and is the interface for the OSX +/// keychain, for example. +/// +/// * After the above two have failed, we just kinda grapple attempting to +/// return *something*. 
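+///
+/// A minimal sketch of how this helper is driven (illustrative; `do_fetch`
+/// stands in for whatever remote operation needs the credentials):
+///
+/// ```ignore
+/// let git_config = repo.config()?;
+/// with_authentication(url.as_str(), &git_config, |credentials| {
+///     let mut callbacks = git2::RemoteCallbacks::new();
+///     callbacks.credentials(credentials);
+///     do_fetch(&mut callbacks)
+/// })?;
+/// ```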
+/// +/// If any form of authentication fails, libgit2 will repeatedly ask us for +/// credentials until we give it a reason to not do so. To ensure we don't +/// just sit here looping forever we keep track of authentications we've +/// attempted and we don't try the same ones again. +fn with_authentication(url: &str, cfg: &git2::Config, mut f: F) + -> CargoResult + where F: FnMut(&mut git2::Credentials) -> CargoResult +{ + let mut cred_helper = git2::CredentialHelper::new(url); + cred_helper.config(cfg); + + let mut ssh_username_requested = false; + let mut cred_helper_bad = None; + let mut ssh_agent_attempts = Vec::new(); + let mut any_attempts = false; + let mut tried_sshkey = false; + + let mut res = f(&mut |url, username, allowed| { + any_attempts = true; + // libgit2's "USERNAME" authentication actually means that it's just + // asking us for a username to keep going. This is currently only really + // used for SSH authentication and isn't really an authentication type. + // The logic currently looks like: + // + // let user = ...; + // if (user.is_null()) + // user = callback(USERNAME, null, ...); + // + // callback(SSH_KEY, user, ...) + // + // So if we're being called here then we know that (a) we're using ssh + // authentication and (b) no username was specified in the URL that + // we're trying to clone. We need to guess an appropriate username here, + // but that may involve a few attempts. Unfortunately we can't switch + // usernames during one authentication session with libgit2, so to + // handle this we bail out of this authentication session after setting + // the flag `ssh_username_requested`, and then we handle this below. + if allowed.contains(git2::USERNAME) { + debug_assert!(username.is_none()); + ssh_username_requested = true; + return Err(git2::Error::from_str("gonna try usernames later")) + } + + // An "SSH_KEY" authentication indicates that we need some sort of SSH + // authentication. This can currently either come from the ssh-agent + // process or from a raw in-memory SSH key. Cargo only supports using + // ssh-agent currently. + // + // If we get called with this then the only way that should be possible + // is if a username is specified in the URL itself (e.g. `username` is + // Some), hence the unwrap() here. We try custom usernames down below. + if allowed.contains(git2::SSH_KEY) && !tried_sshkey { + // If ssh-agent authentication fails, libgit2 will keep + // calling this callback asking for other authentication + // methods to try. Make sure we only try ssh-agent once, + // to avoid looping forever. + tried_sshkey = true; + let username = username.unwrap(); + debug_assert!(!ssh_username_requested); + ssh_agent_attempts.push(username.to_string()); + return git2::Cred::ssh_key_from_agent(username) + } + + // Sometimes libgit2 will ask for a username/password in plaintext. This + // is where Cargo would have an interactive prompt if we supported it, + // but we currently don't! Right now the only way we support fetching a + // plaintext password is through the `credential.helper` support, so + // fetch that here. + if allowed.contains(git2::USER_PASS_PLAINTEXT) { + let r = git2::Cred::credential_helper(cfg, url, username); + cred_helper_bad = Some(r.is_err()); + return r + } + + // I'm... not sure what the DEFAULT kind of authentication is, but seems + // easy to support? 
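+    // For reference (an assumption, not in the original comment): libgit2's
+    // "default" credential type is used for network-stack authentication
+    // such as NTLM or Kerberos/Negotiate, primarily on Windows.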
+ if allowed.contains(git2::DEFAULT) { + return git2::Cred::default() + } + + // Whelp, we tried our best + Err(git2::Error::from_str("no authentication available")) + }); + + // Ok, so if it looks like we're going to be doing ssh authentication, we + // want to try a few different usernames as one wasn't specified in the URL + // for us to use. In order, we'll try: + // + // * A credential helper's username for this URL, if available. + // * This account's username. + // * "git" + // + // We have to restart the authentication session each time (due to + // constraints in libssh2 I guess? maybe this is inherent to ssh?), so we + // call our callback, `f`, in a loop here. + if ssh_username_requested { + debug_assert!(res.is_err()); + let mut attempts = Vec::new(); + attempts.push("git".to_string()); + if let Ok(s) = env::var("USER").or_else(|_| env::var("USERNAME")) { + attempts.push(s); + } + if let Some(ref s) = cred_helper.username { + attempts.push(s.clone()); + } + + while let Some(s) = attempts.pop() { + // We should get `USERNAME` first, where we just return our attempt, + // and then after that we should get `SSH_KEY`. If the first attempt + // fails we'll get called again, but we don't have another option so + // we bail out. + let mut attempts = 0; + res = f(&mut |_url, username, allowed| { + if allowed.contains(git2::USERNAME) { + return git2::Cred::username(&s); + } + if allowed.contains(git2::SSH_KEY) { + debug_assert_eq!(Some(&s[..]), username); + attempts += 1; + if attempts == 1 { + ssh_agent_attempts.push(s.to_string()); + return git2::Cred::ssh_key_from_agent(&s) + } + } + Err(git2::Error::from_str("no authentication available")) + }); + + // If we made two attempts then that means: + // + // 1. A username was requested, we returned `s`. + // 2. An ssh key was requested, we returned to look up `s` in the + // ssh agent. + // 3. For whatever reason that lookup failed, so we were asked again + // for another mode of authentication. + // + // Essentially, if `attempts == 2` then in theory the only error was + // that this username failed to authenticate (e.g. no other network + // errors happened). Otherwise something else is funny so we bail + // out. + if attempts != 2 { + break + } + } + } + + if res.is_ok() || !any_attempts { + return res.map_err(From::from) + } + + // In the case of an authentication failure (where we tried something) then + // we try to give a more helpful error message about precisely what we + // tried. 
+ res.map_err(CargoError::from).map_err(|e| e.into_internal()).chain_err(|| { + let mut msg = "failed to authenticate when downloading \ + repository".to_string(); + if !ssh_agent_attempts.is_empty() { + let names = ssh_agent_attempts.iter() + .map(|s| format!("`{}`", s)) + .collect::>() + .join(", "); + msg.push_str(&format!("\nattempted ssh-agent authentication, but \ + none of the usernames {} succeeded", names)); + } + if let Some(failed_cred_helper) = cred_helper_bad { + if failed_cred_helper { + msg.push_str("\nattempted to find username/password via \ + git's `credential.helper` support, but failed"); + } else { + msg.push_str("\nattempted to find username/password via \ + `credential.helper`, but maybe the found \ + credentials were incorrect"); + } + } + msg + }) +} + +pub fn fetch(repo: &mut git2::Repository, + url: &Url, + refspec: &str, + config: &Config) -> CargoResult<()> { + if !config.network_allowed() { + bail!("attempting to update a git repository, but --frozen \ + was specified") + } + + // If we're fetching from github, attempt github's special fast path for + // testing if we've already got an up-to-date copy of the repository + if url.host_str() == Some("github.com") { + if let Ok(oid) = repo.refname_to_id("refs/remotes/origin/master") { + let mut handle = config.http()?.borrow_mut(); + debug!("attempting github fast path for {}", url); + if github_up_to_date(&mut handle, url, &oid) { + return Ok(()) + } else { + debug!("fast path failed, falling back to a git fetch"); + } + } + } + + // We reuse repositories quite a lot, so before we go through and update the + // repo check to see if it's a little too old and could benefit from a gc. + // In theory this shouldn't be too too expensive compared to the network + // request we're about to issue. + maybe_gc_repo(repo)?; + + debug!("doing a fetch for {}", url); + with_authentication(url.as_str(), &repo.config()?, |f| { + let mut cb = git2::RemoteCallbacks::new(); + cb.credentials(f); + + // Create a local anonymous remote in the repository to fetch the url + let mut remote = repo.remote_anonymous(url.as_str())?; + let mut opts = git2::FetchOptions::new(); + opts.remote_callbacks(cb) + .download_tags(git2::AutotagOption::All); + + network::with_retry(config, || { + debug!("initiating fetch of {} from {}", refspec, url); + remote.fetch(&[refspec], Some(&mut opts), None) + .map_err(CargoError::from) + })?; + Ok(()) + }) +} + +/// Cargo has a bunch of long-lived git repositories in its global cache and +/// some, like the index, are updated very frequently. Right now each update +/// creates a new "pack file" inside the git database, and over time this can +/// cause bad performance and bad current behavior in libgit2. +/// +/// One pathological use case today is where libgit2 opens hundreds of file +/// descriptors, getting us dangerously close to blowing out the OS limits of +/// how many fds we can have open. This is detailed in #4403. +/// +/// To try to combat this problem we attempt a `git gc` here. Note, though, that +/// we may not even have `git` installed on the system! As a result we +/// opportunistically try a `git gc` when the pack directory looks too big, and +/// failing that we just blow away the repository and start over. +fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> { + // Here we arbitrarily declare that if you have more than 100 files in your + // `pack` folder that we need to do a gc. 
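+    // Illustrative invocation (hypothetical, for exercising the gc path): the
+    // threshold read below can be lowered via an environment variable, e.g.
+    //
+    //     __CARGO_PACKFILE_LIMIT=1 cargo fetch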
+ let entries = match repo.path().join("objects/pack").read_dir() { + Ok(e) => e.count(), + Err(_) => { + debug!("skipping gc as pack dir appears gone"); + return Ok(()) + } + }; + let max = env::var("__CARGO_PACKFILE_LIMIT").ok() + .and_then(|s| s.parse::().ok()) + .unwrap_or(100); + if entries < max { + debug!("skipping gc as there's only {} pack files", entries); + return Ok(()) + } + + // First up, try a literal `git gc` by shelling out to git. This is pretty + // likely to fail though as we may not have `git` installed. Note that + // libgit2 doesn't currently implement the gc operation, so there's no + // equivalent there. + match Command::new("git").arg("gc").current_dir(repo.path()).output() { + Ok(out) => { + debug!("git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}", + out.status, + String::from_utf8_lossy(&out.stdout), + String::from_utf8_lossy(&out.stderr)); + if out.status.success() { + let new = git2::Repository::open(repo.path())?; + mem::replace(repo, new); + return Ok(()) + } + } + Err(e) => debug!("git-gc failed to spawn: {}", e), + } + + // Alright all else failed, let's start over. + // + // Here we want to drop the current repository object pointed to by `repo`, + // so we initialize temporary repository in a sub-folder, blow away the + // existing git folder, and then recreate the git repo. Finally we blow away + // the `tmp` folder we allocated. + let path = repo.path().to_path_buf(); + let tmp = path.join("tmp"); + mem::replace(repo, git2::Repository::init(&tmp)?); + for entry in path.read_dir()? { + let entry = entry?; + if entry.file_name().to_str() == Some("tmp") { + continue + } + let path = entry.path(); + drop(fs::remove_file(&path).or_else(|_| fs::remove_dir_all(&path))); + } + if repo.is_bare() { + mem::replace(repo, git2::Repository::init_bare(path)?); + } else { + mem::replace(repo, git2::Repository::init(path)?); + } + fs::remove_dir_all(&tmp).chain_err(|| { + format!("failed to remove {:?}", tmp) + })?; + Ok(()) +} + +/// Updating the index is done pretty regularly so we want it to be as fast as +/// possible. For registries hosted on github (like the crates.io index) there's +/// a fast path available to use [1] to tell us that there's no updates to be +/// made. +/// +/// This function will attempt to hit that fast path and verify that the `oid` +/// is actually the current `master` branch of the repository. If `true` is +/// returned then no update needs to be performed, but if `false` is returned +/// then the standard update logic still needs to happen. +/// +/// [1]: https://developer.github.com/v3/repos/commits/#get-the-sha-1-of-a-commit-reference +/// +/// Note that this function should never cause an actual failure because it's +/// just a fast path. As a result all errors are ignored in this function and we +/// just return a `bool`. Any real errors will be reported through the normal +/// update path above. +fn github_up_to_date(handle: &mut Easy, url: &Url, oid: &git2::Oid) -> bool { + macro_rules! 
try { + ($e:expr) => (match $e { + Some(e) => e, + None => return false, + }) + } + + // This expects github urls in the form `github.com/user/repo` and nothing + // else + let mut pieces = try!(url.path_segments()); + let username = try!(pieces.next()); + let repo = try!(pieces.next()); + if pieces.next().is_some() { + return false + } + + let url = format!("https://api.github.com/repos/{}/{}/commits/master", + username, repo); + try!(handle.get(true).ok()); + try!(handle.url(&url).ok()); + try!(handle.useragent("cargo").ok()); + let mut headers = List::new(); + try!(headers.append("Accept: application/vnd.github.3.sha").ok()); + try!(headers.append(&format!("If-None-Match: \"{}\"", oid)).ok()); + try!(handle.http_headers(headers).ok()); + try!(handle.perform().ok()); + + try!(handle.response_code().ok()) == 304 +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/mod.rs new file mode 100644 index 000000000..ed784e95a --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/mod.rs @@ -0,0 +1,13 @@ +pub use self::config::SourceConfigMap; +pub use self::directory::DirectorySource; +pub use self::git::GitSource; +pub use self::path::PathSource; +pub use self::registry::{RegistrySource, CRATES_IO}; +pub use self::replaced::ReplacedSource; + +pub mod config; +pub mod directory; +pub mod git; +pub mod path; +pub mod registry; +pub mod replaced; diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/path.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/path.rs new file mode 100644 index 000000000..11760c29a --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/path.rs @@ -0,0 +1,544 @@ +use std::fmt::{self, Debug, Formatter}; +use std::fs; +use std::path::{Path, PathBuf}; + +use filetime::FileTime; +use git2; +use glob::Pattern; +use ignore::Match; +use ignore::gitignore::GitignoreBuilder; + +use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry}; +use ops; +use util::{self, CargoError, CargoResult, internal}; +use util::Config; + +pub struct PathSource<'cfg> { + source_id: SourceId, + path: PathBuf, + updated: bool, + packages: Vec, + config: &'cfg Config, + recursive: bool, +} + +impl<'cfg> PathSource<'cfg> { + /// Invoked with an absolute path to a directory that contains a Cargo.toml. + /// + /// This source will only return the package at precisely the `path` + /// specified, and it will be an error if there's not a package at `path`. + pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) + -> PathSource<'cfg> { + PathSource { + source_id: id.clone(), + path: path.to_path_buf(), + updated: false, + packages: Vec::new(), + config: config, + recursive: false, + } + } + + /// Creates a new source which is walked recursively to discover packages. + /// + /// This is similar to the `new` method except that instead of requiring a + /// valid package to be present at `root` the folder is walked entirely to + /// crawl for packages. + /// + /// Note that this should be used with care and likely shouldn't be chosen + /// by default! + pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) + -> PathSource<'cfg> { + PathSource { + recursive: true, + .. 
PathSource::new(root, id, config)
+        }
+    }
+
+    pub fn root_package(&mut self) -> CargoResult<Package> {
+        trace!("root_package; source={:?}", self);
+
+        self.update()?;
+
+        match self.packages.iter().find(|p| p.root() == &*self.path) {
+            Some(pkg) => Ok(pkg.clone()),
+            None => Err(internal("no package found in source"))
+        }
+    }
+
+    pub fn read_packages(&self) -> CargoResult<Vec<Package>> {
+        if self.updated {
+            Ok(self.packages.clone())
+        } else if self.recursive {
+            ops::read_packages(&self.path, &self.source_id, self.config)
+        } else {
+            let path = self.path.join("Cargo.toml");
+            let (pkg, _) = ops::read_package(&path, &self.source_id, self.config)?;
+            Ok(vec![pkg])
+        }
+    }
+
+    /// List all files relevant to building this package inside this source.
+    ///
+    /// This function will use the appropriate methods to determine the
+    /// set of files underneath this source's directory which are relevant for
+    /// building `pkg`.
+    ///
+    /// The basic assumption of this method is that all files in the directory
+    /// are relevant for building this package, but it also contains logic to
+    /// use other methods like .gitignore to filter the list of files.
+    ///
+    /// ## Pattern matching strategy
+    ///
+    /// Migrating from glob-like pattern matching (using the `glob` crate) to
+    /// gitignore-like pattern matching (using the `ignore` crate). The
+    /// migration stages are:
+    ///
+    /// 1) Only warn users about the future change iff their matching rules are
+    ///    affected. (CURRENT STAGE)
+    ///
+    /// 2) Switch to the new strategy and update documents. Still keep warning
+    ///    affected users.
+    ///
+    /// 3) Drop the old strategy and no more warnings.
+    ///
+    /// See <https://github.com/rust-lang/cargo/issues/4268> for more info.
+    pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
+        let root = pkg.root();
+        let no_include_option = pkg.manifest().include().is_empty();
+
+        // glob-like matching rules
+
+        let glob_parse = |p: &String| {
+            let pattern: &str = if p.starts_with('/') {
+                &p[1..p.len()]
+            } else {
+                p
+            };
+            Pattern::new(pattern).map_err(|e| {
+                CargoError::from(format!("could not parse glob pattern `{}`: {}", p, e))
+            })
+        };
+
+        let glob_exclude = pkg.manifest()
+            .exclude()
+            .iter()
+            .map(|p| glob_parse(p))
+            .collect::<Result<Vec<_>, _>>()?;
+
+        let glob_include = pkg.manifest()
+            .include()
+            .iter()
+            .map(|p| glob_parse(p))
+            .collect::<Result<Vec<_>, _>>()?;
+
+        let glob_should_package = |relative_path: &Path| -> bool {
+            fn glob_match(patterns: &Vec<Pattern>, relative_path: &Path) -> bool {
+                patterns.iter().any(|pattern| pattern.matches_path(relative_path))
+            }
+
+            // include and exclude options are mutually exclusive.
+            if no_include_option {
+                !glob_match(&glob_exclude, relative_path)
+            } else {
+                glob_match(&glob_include, relative_path)
+            }
+        };
+
+        // ignore-like matching rules
+
+        let mut exclude_builder = GitignoreBuilder::new(root);
+        for rule in pkg.manifest().exclude() {
+            exclude_builder.add_line(None, rule)?;
+        }
+        let ignore_exclude = exclude_builder.build()?;
+
+        let mut include_builder = GitignoreBuilder::new(root);
+        for rule in pkg.manifest().include() {
+            include_builder.add_line(None, rule)?;
+        }
+        let ignore_include = include_builder.build()?;
+
+        let ignore_should_package = |relative_path: &Path| -> CargoResult<bool> {
+            // include and exclude options are mutually exclusive.
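+            // Illustrative example (hypothetical manifest): with
+            //     exclude = ["benches/**"]
+            // `benches/big.rs` matches, so it is not packaged. With
+            //     include = ["src/**"]
+            // only paths matching an include rule are packaged; everything
+            // else is dropped. Note that `Match::Ignore` therefore means
+            // "excluded" in the first branch below but "included" in the
+            // second.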
+ if no_include_option { + match ignore_exclude.matched_path_or_any_parents( + relative_path, + /* is_dir */ false, + ) { + Match::None => Ok(true), + Match::Ignore(_) => Ok(false), + Match::Whitelist(pattern) => Err(CargoError::from(format!( + "exclude rules cannot start with `!`: {}", + pattern.original() + ))), + } + } else { + match ignore_include.matched_path_or_any_parents( + relative_path, + /* is_dir */ false, + ) { + Match::None => Ok(false), + Match::Ignore(_) => Ok(true), + Match::Whitelist(pattern) => Err(CargoError::from(format!( + "include rules cannot start with `!`: {}", + pattern.original() + ))), + } + } + }; + + // matching to paths + + let mut filter = |path: &Path| -> CargoResult { + let relative_path = util::without_prefix(path, root).unwrap(); + let glob_should_package = glob_should_package(relative_path); + let ignore_should_package = ignore_should_package(relative_path)?; + + if glob_should_package != ignore_should_package { + if glob_should_package { + if no_include_option { + self.config + .shell() + .warn(format!( + "Pattern matching for Cargo's include/exclude fields is changing and \ + file `{}` WILL be excluded in a future Cargo version.\n\ + See https://github.com/rust-lang/cargo/issues/4268 for more info", + relative_path.display() + ))?; + } else { + self.config + .shell() + .warn(format!( + "Pattern matching for Cargo's include/exclude fields is changing and \ + file `{}` WILL NOT be included in a future Cargo version.\n\ + See https://github.com/rust-lang/cargo/issues/4268 for more info", + relative_path.display() + ))?; + } + } else if no_include_option { + self.config + .shell() + .warn(format!( + "Pattern matching for Cargo's include/exclude fields is changing and \ + file `{}` WILL NOT be excluded in a future Cargo version.\n\ + See https://github.com/rust-lang/cargo/issues/4268 for more info", + relative_path.display() + ))?; + } else { + self.config + .shell() + .warn(format!( + "Pattern matching for Cargo's include/exclude fields is changing and \ + file `{}` WILL be included in a future Cargo version.\n\ + See https://github.com/rust-lang/cargo/issues/4268 for more info", + relative_path.display() + ))?; + } + } + + // Update to ignore_should_package for Stage 2 + Ok(glob_should_package) + }; + + // attempt git-prepopulate only if no `include` (rust-lang/cargo#4135) + if no_include_option { + if let Some(result) = self.discover_git_and_list_files(pkg, root, &mut filter) { + return result; + } + } + self.list_files_walk(pkg, &mut filter) + } + + // Returns Some(_) if found sibling Cargo.toml and .git folder; + // otherwise caller should fall back on full file list. + fn discover_git_and_list_files(&self, + pkg: &Package, + root: &Path, + filter: &mut FnMut(&Path) -> CargoResult) + -> Option>> { + // If this package is in a git repository, then we really do want to + // query the git repository as it takes into account items such as + // .gitignore. We're not quite sure where the git repository is, + // however, so we do a bit of a probe. + // + // We walk this package's path upwards and look for a sibling + // Cargo.toml and .git folder. If we find one then we assume that we're + // part of that repository. + let mut cur = root; + loop { + if cur.join("Cargo.toml").is_file() { + // If we find a git repository next to this Cargo.toml, we still + // check to see if we are indeed part of the index. If not, then + // this is likely an unrelated git repo, so keep going. 
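+                // Editor's sketch of the probe (not from the original
+                // source): for a package at `/ws/crates/foo` inside a
+                // repository rooted at `/ws`, the loop visits
+                // `/ws/crates/foo`, `/ws/crates`, and `/ws`; at `/ws` it
+                // finds a sibling `Cargo.toml`, opens the repository, and
+                // checks whether `crates/foo/Cargo.toml` is tracked in its
+                // index before using git to enumerate files.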
+                if let Ok(repo) = git2::Repository::open(cur) {
+                    let index = match repo.index() {
+                        Ok(index) => index,
+                        Err(err) => return Some(Err(err.into())),
+                    };
+                    let path = util::without_prefix(root, cur)
+                                    .unwrap().join("Cargo.toml");
+                    if index.get_path(&path, 0).is_some() {
+                        return Some(self.list_files_git(pkg, repo, filter));
+                    }
+                }
+            }
+            // don't cross submodule boundaries
+            if cur.join(".git").is_dir() {
+                break
+            }
+            match cur.parent() {
+                Some(parent) => cur = parent,
+                None => break,
+            }
+        }
+        None
+    }
+
+    fn list_files_git(&self, pkg: &Package, repo: git2::Repository,
+                      filter: &mut FnMut(&Path) -> CargoResult<bool>)
+                      -> CargoResult<Vec<PathBuf>> {
+        warn!("list_files_git {}", pkg.package_id());
+        let index = repo.index()?;
+        let root = repo.workdir().ok_or_else(|| {
+            internal("Can't list files on a bare repository.")
+        })?;
+        let pkg_path = pkg.root();
+
+        let mut ret = Vec::<PathBuf>::new();
+
+        // We use information from the git repository to guide us in traversing
+        // its tree. The primary purpose of this is to take advantage of the
+        // .gitignore and auto-ignore files that don't matter.
+        //
+        // Here we're also careful to look at both tracked and untracked files as
+        // the untracked files are often part of a build and may become relevant
+        // as part of a future commit.
+        let index_files = index.iter().map(|entry| {
+            use libgit2_sys::GIT_FILEMODE_COMMIT;
+            let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;
+            (join(root, &entry.path), Some(is_dir))
+        });
+        let mut opts = git2::StatusOptions::new();
+        opts.include_untracked(true);
+        if let Some(suffix) = util::without_prefix(pkg_path, root) {
+            opts.pathspec(suffix);
+        }
+        let statuses = repo.statuses(Some(&mut opts))?;
+        let untracked = statuses.iter().filter_map(|entry| {
+            match entry.status() {
+                git2::STATUS_WT_NEW => Some((join(root, entry.path_bytes()), None)),
+                _ => None,
+            }
+        });
+
+        let mut subpackages_found = Vec::new();
+
+        for (file_path, is_dir) in index_files.chain(untracked) {
+            let file_path = file_path?;
+
+            // Filter out files blatantly outside this package. This is helped a
+            // bit above via the `pathspec` function call, but we need to filter
+            // the entries in the index as well.
+            if !file_path.starts_with(pkg_path) {
+                continue
+            }
+
+            match file_path.file_name().and_then(|s| s.to_str()) {
+                // Filter out Cargo.lock and target always, we don't want to
+                // package a lock file no one will ever read and we also avoid
+                // build artifacts
+                Some("Cargo.lock") |
+                Some("target") => continue,
+
+                // Keep track of all sub-packages found and also strip out all
+                // matches we've found so far. Note, though, that if we find
+                // our own `Cargo.toml` we keep going.
+                Some("Cargo.toml") => {
+                    let path = file_path.parent().unwrap();
+                    if path != pkg_path {
+                        warn!("subpackage found: {}", path.display());
+                        ret.retain(|p| !p.starts_with(path));
+                        subpackages_found.push(path.to_path_buf());
+                        continue
+                    }
+                }
+
+                _ => {}
+            }
+
+            // If this file is part of any other sub-package we've found so far,
+            // skip it.
+            if subpackages_found.iter().any(|p| file_path.starts_with(p)) {
+                continue
+            }
+
+            if is_dir.unwrap_or_else(|| file_path.is_dir()) {
+                warn!("  found submodule {}", file_path.display());
+                let rel = util::without_prefix(&file_path, root).unwrap();
+                let rel = rel.to_str().ok_or_else(|| {
+                    CargoError::from(format!("invalid utf-8 filename: {}", rel.display()))
+                })?;
+                // Git submodules are currently only named through `/` path
+                // separators, explicitly not `\` which windows uses. Who knew?
+                let rel = rel.replace(r"\", "/");
+                match repo.find_submodule(&rel).and_then(|s| s.open()) {
+                    Ok(repo) => {
+                        let files = self.list_files_git(pkg, repo, filter)?;
+                        ret.extend(files.into_iter());
+                    }
+                    Err(..) => {
+                        PathSource::walk(&file_path, &mut ret, false, filter)?;
+                    }
+                }
+            } else if (*filter)(&file_path)? {
+                // We found a file!
+                warn!("  found {}", file_path.display());
+                ret.push(file_path);
+            }
+        }
+        return Ok(ret);
+
+        #[cfg(unix)]
+        fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
+            use std::os::unix::prelude::*;
+            use std::ffi::OsStr;
+            Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))
+        }
+        #[cfg(windows)]
+        fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
+            use std::str;
+            match str::from_utf8(data) {
+                Ok(s) => Ok(path.join(s)),
+                Err(..) => Err(internal("cannot process path in git with a non \
+                                         unicode filename")),
+            }
+        }
+    }
+
+    fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> CargoResult<bool>)
+                       -> CargoResult<Vec<PathBuf>> {
+        let mut ret = Vec::new();
+        PathSource::walk(pkg.root(), &mut ret, true, filter)?;
+        Ok(ret)
+    }
+
+    fn walk(path: &Path, ret: &mut Vec<PathBuf>,
+            is_root: bool, filter: &mut FnMut(&Path) -> CargoResult<bool>)
+            -> CargoResult<()>
+    {
+        if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {
+            if (*filter)(path)? {
+                ret.push(path.to_path_buf());
+            }
+            return Ok(())
+        }
+        // Don't recurse into any sub-packages that we have
+        if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
+            return Ok(())
+        }
+
+        // For package integration tests, we need to sort the paths in a deterministic order to
+        // be able to match stdout warnings in the same order.
+        //
+        // TODO: Drop collect and sort after transition period and dropping warning tests.
+        // See
+        // and
+        let mut entries: Vec<fs::DirEntry> = fs::read_dir(path)?.map(|e| e.unwrap()).collect();
+        entries.sort_by(|a, b| a.path().as_os_str().cmp(b.path().as_os_str()));
+        for entry in entries {
+            let path = entry.path();
+            let name = path.file_name().and_then(|s| s.to_str());
+            // Skip dotfile directories
+            if name.map(|s| s.starts_with('.')) == Some(true) {
+                continue
+            } else if is_root {
+                // Skip cargo artifacts
+                match name {
+                    Some("target") | Some("Cargo.lock") => continue,
+                    _ => {}
+                }
+            }
+            PathSource::walk(&path, ret, false, filter)?;
+        }
+        Ok(())
+    }
+}
+
+impl<'cfg> Debug for PathSource<'cfg> {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(f, "the paths source")
+    }
+}
+
+impl<'cfg> Registry for PathSource<'cfg> {
+    fn query(&mut self,
+             dep: &Dependency,
+             f: &mut FnMut(Summary)) -> CargoResult<()> {
+        for s in self.packages.iter().map(|p| p.summary()) {
+            if dep.matches(s) {
+                f(s.clone())
+            }
+        }
+        Ok(())
+    }
+
+    fn supports_checksums(&self) -> bool {
+        false
+    }
+
+    fn requires_precise(&self) -> bool {
+        false
+    }
+}
+
+impl<'cfg> Source for PathSource<'cfg> {
+    fn source_id(&self) -> &SourceId {
+        &self.source_id
+    }
+
+    fn update(&mut self) -> CargoResult<()> {
+        if !self.updated {
+            let packages = self.read_packages()?;
+            self.packages.extend(packages.into_iter());
+            self.updated = true;
+        }
+
+        Ok(())
+    }
+
+    fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
+        trace!("getting packages; id={}", id);
+
+        let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
+        pkg.cloned().ok_or_else(|| {
+            internal(format!("failed to find {} in path source", id))
+        })
+    }
+
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        if !self.updated {
+            return Err(internal("BUG: source was not updated"));
+        }
+
+        let mut max = FileTime::zero();
+        let mut max_path = PathBuf::from("");
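+        // Editor's note (not from the original source): the fingerprint
+        // computed below is the newest mtime over all files returned by
+        // `list_files`, rendered as `<mtime> (<path>)`, so touching any
+        // packaged file changes the fingerprint and invalidates a previous
+        // build.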
for file in self.list_files(pkg)? { + // An fs::stat error here is either because path is a + // broken symlink, a permissions error, or a race + // condition where this path was rm'ed - either way, + // we can ignore the error and treat the path's mtime + // as 0. + let mtime = fs::metadata(&file).map(|meta| { + FileTime::from_last_modification_time(&meta) + }).unwrap_or(FileTime::zero()); + warn!("{} {}", mtime, file.display()); + if mtime > max { + max = mtime; + max_path = file; + } + } + trace!("fingerprint {}: {}", self.path.display(), max); + Ok(format!("{} ({})", max, max_path.display())) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/registry/index.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/registry/index.rs new file mode 100644 index 000000000..14c8ab7d6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/registry/index.rs @@ -0,0 +1,192 @@ +use std::collections::HashMap; +use std::path::Path; +use std::str; + +use serde_json; +use semver::Version; + +use core::dependency::Dependency; +use core::{SourceId, Summary, PackageId}; +use sources::registry::{RegistryPackage, INDEX_LOCK}; +use sources::registry::RegistryData; +use util::{CargoError, CargoResult, internal, Filesystem, Config}; + +pub struct RegistryIndex<'cfg> { + source_id: SourceId, + path: Filesystem, + cache: HashMap>, + hashes: HashMap>, // (name, vers) => cksum + config: &'cfg Config, + locked: bool, +} + +impl<'cfg> RegistryIndex<'cfg> { + pub fn new(id: &SourceId, + path: &Filesystem, + config: &'cfg Config, + locked: bool) + -> RegistryIndex<'cfg> { + RegistryIndex { + source_id: id.clone(), + path: path.clone(), + cache: HashMap::new(), + hashes: HashMap::new(), + config: config, + locked: locked, + } + } + + /// Return the hash listed for a specified PackageId. + pub fn hash(&mut self, + pkg: &PackageId, + load: &mut RegistryData) + -> CargoResult { + let name = pkg.name(); + let version = pkg.version(); + if let Some(s) = self.hashes.get(name).and_then(|v| v.get(version)) { + return Ok(s.clone()) + } + // Ok, we're missing the key, so parse the index file to load it. + self.summaries(name, load)?; + self.hashes.get(name).and_then(|v| v.get(version)).ok_or_else(|| { + internal(format!("no hash listed for {}", pkg)) + }).map(|s| s.clone()) + } + + /// Parse the on-disk metadata for the package provided + /// + /// Returns a list of pairs of (summary, yanked) for the package name + /// specified. 
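+    /// (Editor's note, not in the original source: results are memoized in
+    /// `self.cache`, so the index file for a given crate name is parsed at
+    /// most once per `RegistryIndex` instance.)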
+ pub fn summaries(&mut self, + name: &str, + load: &mut RegistryData) + -> CargoResult<&Vec<(Summary, bool)>> { + if self.cache.contains_key(name) { + return Ok(&self.cache[name]); + } + let summaries = self.load_summaries(name, load)?; + self.cache.insert(name.to_string(), summaries); + Ok(&self.cache[name]) + } + + fn load_summaries(&mut self, + name: &str, + load: &mut RegistryData) + -> CargoResult> { + let (root, _lock) = if self.locked { + let lock = self.path.open_ro(Path::new(INDEX_LOCK), + self.config, + "the registry index"); + match lock { + Ok(lock) => { + (lock.path().parent().unwrap().to_path_buf(), Some(lock)) + } + Err(_) => return Ok(Vec::new()), + } + } else { + (self.path.clone().into_path_unlocked(), None) + }; + + let fs_name = name.chars().flat_map(|c| { + c.to_lowercase() + }).collect::(); + + // see module comment for why this is structured the way it is + let path = match fs_name.len() { + 1 => format!("1/{}", fs_name), + 2 => format!("2/{}", fs_name), + 3 => format!("3/{}/{}", &fs_name[..1], fs_name), + _ => format!("{}/{}/{}", &fs_name[0..2], &fs_name[2..4], fs_name), + }; + let mut ret = Vec::new(); + let mut hit_closure = false; + let err = load.load(&root, Path::new(&path), &mut |contents| { + hit_closure = true; + let contents = str::from_utf8(contents).map_err(|_| { + CargoError::from("registry index file was not valid utf-8") + })?; + ret.reserve(contents.lines().count()); + let lines = contents.lines() + .map(|s| s.trim()) + .filter(|l| !l.is_empty()); + + // Attempt forwards-compatibility on the index by ignoring + // everything that we ourselves don't understand, that should + // allow future cargo implementations to break the + // interpretation of each line here and older cargo will simply + // ignore the new lines. + ret.extend(lines.filter_map(|line| { + self.parse_registry_package(line).ok() + })); + + Ok(()) + }); + + // We ignore lookup failures as those are just crates which don't exist + // or we haven't updated the registry yet. If we actually ran the + // closure though then we care about those errors. + if hit_closure { + err?; + } + + Ok(ret) + } + + /// Parse a line from the registry's index file into a Summary for a + /// package. + /// + /// The returned boolean is whether or not the summary has been yanked. + fn parse_registry_package(&mut self, line: &str) + -> CargoResult<(Summary, bool)> { + let RegistryPackage { + name, vers, cksum, deps, features, yanked + } = super::DEFAULT_ID.set(&self.source_id, || { + serde_json::from_str::(line) + })?; + let pkgid = PackageId::new(&name, &vers, &self.source_id)?; + let summary = Summary::new(pkgid, deps.inner, features)?; + let summary = summary.set_checksum(cksum.clone()); + if self.hashes.contains_key(&name[..]) { + self.hashes.get_mut(&name[..]).unwrap().insert(vers, cksum); + } else { + self.hashes.entry(name.into_owned()) + .or_insert_with(HashMap::new) + .insert(vers, cksum); + } + Ok((summary, yanked.unwrap_or(false))) + } + + pub fn query(&mut self, + dep: &Dependency, + load: &mut RegistryData, + f: &mut FnMut(Summary)) + -> CargoResult<()> { + let source_id = self.source_id.clone(); + let summaries = self.summaries(dep.name(), load)?; + let summaries = summaries.iter().filter(|&&(_, yanked)| { + dep.source_id().precise().is_some() || !yanked + }).map(|s| s.0.clone()); + + // Handle `cargo update --precise` here. 
If specified, our own source
+        // will have a precise version listed of the form
+        // `<name>=<version>` where `<name>` is the name of a crate on
+        // this source and `<version>` is the version requested
+        // (argument to `--precise`).
+        let summaries = summaries.filter(|s| {
+            match source_id.precise() {
+                Some(p) if p.starts_with(dep.name()) &&
+                           p[dep.name().len()..].starts_with('=') => {
+                    let vers = &p[dep.name().len() + 1..];
+                    s.version().to_string() == vers
+                }
+                _ => true,
+            }
+        });
+
+        for summary in summaries {
+            if dep.matches(&summary) {
+                f(summary);
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/registry/local.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/registry/local.rs
new file mode 100644
index 000000000..5803fd77d
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/sources/registry/local.rs
@@ -0,0 +1,105 @@
+use std::io::SeekFrom;
+use std::io::prelude::*;
+use std::path::Path;
+
+use core::PackageId;
+use hex::ToHex;
+use sources::registry::{RegistryData, RegistryConfig};
+use util::FileLock;
+use util::paths;
+use util::{Config, Sha256, Filesystem};
+use util::errors::{CargoResult, CargoResultExt};
+
+pub struct LocalRegistry<'cfg> {
+    index_path: Filesystem,
+    root: Filesystem,
+    src_path: Filesystem,
+    config: &'cfg Config,
+}
+
+impl<'cfg> LocalRegistry<'cfg> {
+    pub fn new(root: &Path,
+               config: &'cfg Config,
+               name: &str) -> LocalRegistry<'cfg> {
+        LocalRegistry {
+            src_path: config.registry_source_path().join(name),
+            index_path: Filesystem::new(root.join("index")),
+            root: Filesystem::new(root.to_path_buf()),
+            config: config,
+        }
+    }
+}
+
+impl<'cfg> RegistryData for LocalRegistry<'cfg> {
+    fn index_path(&self) -> &Filesystem {
+        &self.index_path
+    }
+
+    fn load(&self,
+            root: &Path,
+            path: &Path,
+            data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()> {
+        data(&paths::read_bytes(&root.join(path))?)
+    }
+
+    fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+        // Local registries don't have configuration for remote APIs or anything
+        // like that
+        Ok(None)
+    }
+
+    fn update_index(&mut self) -> CargoResult<()> {
+        // Nothing to update, we just use what's on disk. Verify it actually
+        // exists though. We don't use any locks as we're just checking whether
+        // these directories exist.
+        let root = self.root.clone().into_path_unlocked();
+        if !root.is_dir() {
+            bail!("local registry path is not a directory: {}",
+                  root.display())
+        }
+        let index_path = self.index_path.clone().into_path_unlocked();
+        if !index_path.is_dir() {
+            bail!("local registry index path is not a directory: {}",
+                  index_path.display())
+        }
+        Ok(())
+    }
+
+    fn download(&mut self, pkg: &PackageId, checksum: &str)
+                -> CargoResult<FileLock> {
+        let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
+        let mut crate_file = self.root.open_ro(&crate_file,
+                                               self.config,
+                                               "crate file")?;
+
+        // If we've already got an unpacked version of this crate, then skip the
+        // checksum below as it is in theory already verified.
+        let dst = format!("{}-{}", pkg.name(), pkg.version());
+        if self.src_path.join(dst).into_path_unlocked().exists() {
+            return Ok(crate_file)
+        }
+
+        self.config.shell().status("Unpacking", pkg)?;
+
+        // We don't actually need to download anything per se, we just need to
+        // verify the checksum matches the .crate file itself.
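+        // Editor's note (not from the original source): the loop below is a
+        // plain streaming digest; it reads the .crate file in 64 KiB chunks,
+        // feeds each chunk to SHA-256, and finally compares the hex digest
+        // against the checksum recorded in the registry index, so the whole
+        // file is never buffered in memory.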
+        let mut state = Sha256::new();
+        let mut buf = [0; 64 * 1024];
+        loop {
+            let n = crate_file.read(&mut buf).chain_err(|| {
+                format!("failed to read `{}`", crate_file.path().display())
+            })?;
+            if n == 0 {
+                break
+            }
+            state.update(&buf[..n]);
+        }
+        if state.finish().to_hex() != checksum {
+            bail!("failed to verify the checksum of `{}`", pkg)
+        }
+
+        crate_file.seek(SeekFrom::Start(0))?;
+
+        Ok(crate_file)
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/registry/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/registry/mod.rs
new file mode 100644
index 000000000..c967e2ebc
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/sources/registry/mod.rs
@@ -0,0 +1,516 @@
+//! A `Source` for registry-based packages.
+//!
+//! # What's a Registry?
+//!
+//! Registries are central locations where packages can be uploaded to,
+//! discovered, and searched for. The purpose of a registry is to have a
+//! location that serves as permanent storage for versions of a crate over time.
+//!
+//! Compared to git sources, a registry provides many packages as well as many
+//! versions simultaneously. Git sources can also have commits deleted through
+//! rebasings, whereas registries cannot have their versions deleted.
+//!
+//! # The Index of a Registry
+//!
+//! One of the major difficulties with a registry is that hosting so many
+//! packages may quickly run into performance problems when dealing with
+//! dependency graphs. It's infeasible for cargo to download the entire contents
+//! of the registry just to resolve one package's dependencies, for example. As
+//! a result, cargo needs some efficient method of querying what packages are
+//! available on a registry, what versions are available, and what the
+//! dependencies for each version are.
+//!
+//! One method of doing so would be having the registry expose an HTTP endpoint
+//! which can be queried with a list of packages and a response of their
+//! dependencies and versions is returned. This is somewhat inefficient, however,
+//! as we may have to hit the endpoint many times and we may have already
+//! queried for much of the data locally already (for other packages, for
+//! example). This also involves inventing a transport format between the
+//! registry and Cargo itself, so this route was not taken.
+//!
+//! Instead, Cargo communicates with registries through a git repository
+//! referred to as the Index. The Index of a registry is essentially an easily
+//! query-able version of the registry's database for a list of versions of a
+//! package as well as a list of dependencies for each version.
+//!
+//! Using git to host this index provides a number of benefits:
+//!
+//! * The entire index can be stored efficiently locally on disk. This means
+//!   that all queries of a registry can happen locally and don't need to touch
+//!   the network.
+//!
+//! * Updates of the index are quite efficient. Using git buys incremental
+//!   updates, compressed transmission, etc. for free. The index must be updated
+//!   each time we need fresh information from a registry, but this is one
+//!   update of a git repository that probably hasn't changed a whole lot so
+//!   it shouldn't be too expensive.
+//!
+//!   Additionally, each modification to the index is just appending a line at
+//!   the end of a file (the exact format is described later). This means that
+//!   the commits for an index are quite small and easily applied/compressible.
+//!
+//! ## The format of the Index
+//!
+//!
The index is a store for the list of versions for all packages known, so its +//! format on disk is optimized slightly to ensure that `ls registry` doesn't +//! produce a list of all packages ever known. The index also wants to ensure +//! that there's not a million files which may actually end up hitting +//! filesystem limits at some point. To this end, a few decisions were made +//! about the format of the registry: +//! +//! 1. Each crate will have one file corresponding to it. Each version for a +//! crate will just be a line in this file. +//! 2. There will be two tiers of directories for crate names, under which +//! crates corresponding to those tiers will be located. +//! +//! As an example, this is an example hierarchy of an index: +//! +//! ```notrust +//! . +//! ├── 3 +//! │   └── u +//! │   └── url +//! ├── bz +//! │   └── ip +//! │   └── bzip2 +//! ├── config.json +//! ├── en +//! │   └── co +//! │   └── encoding +//! └── li +//!    ├── bg +//!    │   └── libgit2 +//!    └── nk +//!    └── link-config +//! ``` +//! +//! The root of the index contains a `config.json` file with a few entries +//! corresponding to the registry (see `RegistryConfig` below). +//! +//! Otherwise, there are three numbered directories (1, 2, 3) for crates with +//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the +//! crate files underneath them, while the 3 directory is sharded by the first +//! letter of the crate name. +//! +//! Otherwise the top-level directory contains many two-letter directory names, +//! each of which has many sub-folders with two letters. At the end of all these +//! are the actual crate files themselves. +//! +//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as +//! efficient lookup based on the crate name itself. +//! +//! ## Crate files +//! +//! Each file in the index is the history of one crate over time. Each line in +//! the file corresponds to one version of a crate, stored in JSON format (see +//! the `RegistryPackage` structure below). +//! +//! As new versions are published, new lines are appended to this file. The only +//! modifications to this file that should happen over time are yanks of a +//! particular version. +//! +//! # Downloading Packages +//! +//! The purpose of the Index was to provide an efficient method to resolve the +//! dependency graph for a package. So far we only required one network +//! interaction to update the registry's repository (yay!). After resolution has +//! been performed, however we need to download the contents of packages so we +//! can read the full manifest and build the source code. +//! +//! To accomplish this, this source's `download` method will make an HTTP +//! request per-package requested to download tarballs into a local cache. These +//! tarballs will then be unpacked into a destination folder. +//! +//! Note that because versions uploaded to the registry are frozen forever that +//! the HTTP download and unpacking can all be skipped if the version has +//! already been downloaded and unpacked. This caching allows us to only +//! download a package when absolutely necessary. +//! +//! # Filesystem Hierarchy +//! +//! Overall, the `$HOME/.cargo` looks like this when talking about the registry: +//! +//! ```notrust +//! # A folder under which all registry metadata is hosted (similar to +//! # $HOME/.cargo/git) +//! $HOME/.cargo/registry/ +//! +//! # For each registry that cargo knows about (keyed by hostname + hash) +//! 
# there is a folder which is the checked out version of the index for +//! # the registry in this location. Note that this is done so cargo can +//! # support multiple registries simultaneously +//! index/ +//! registry1-/ +//! registry2-/ +//! ... +//! +//! # This folder is a cache for all downloaded tarballs from a registry. +//! # Once downloaded and verified, a tarball never changes. +//! cache/ +//! registry1-/-.crate +//! ... +//! +//! # Location in which all tarballs are unpacked. Each tarball is known to +//! # be frozen after downloading, so transitively this folder is also +//! # frozen once its unpacked (it's never unpacked again) +//! src/ +//! registry1-/-/... +//! ... +//! ``` + +use std::borrow::Cow; +use std::collections::BTreeMap; +use std::fmt; +use std::fs::File; +use std::path::{PathBuf, Path}; + +use flate2::read::GzDecoder; +use semver::Version; +use serde::de; +use tar::Archive; + +use core::{Source, SourceId, PackageId, Package, Summary, Registry}; +use core::dependency::{Dependency, Kind}; +use sources::PathSource; +use util::{CargoResult, Config, internal, FileLock, Filesystem}; +use util::errors::CargoResultExt; +use util::hex; + +const INDEX_LOCK: &'static str = ".cargo-index-lock"; +pub static CRATES_IO: &'static str = "https://github.com/rust-lang/crates.io-index"; + +pub struct RegistrySource<'cfg> { + source_id: SourceId, + src_path: Filesystem, + config: &'cfg Config, + updated: bool, + ops: Box, + index: index::RegistryIndex<'cfg>, + index_locked: bool, +} + +#[derive(Deserialize)] +pub struct RegistryConfig { + /// Download endpoint for all crates. This will be appended with + /// `///download` and then will be hit with an HTTP GET + /// request to download the tarball for a crate. + pub dl: String, + + /// API endpoint for the registry. This is what's actually hit to perform + /// operations like yanks, owner modifications, publish new crates, etc. 
+ pub api: String, +} + +#[derive(Deserialize)] +struct RegistryPackage<'a> { + name: Cow<'a, str>, + vers: Version, + deps: DependencyList, + features: BTreeMap>, + cksum: String, + yanked: Option, +} + +struct DependencyList { + inner: Vec, +} + +#[derive(Deserialize)] +struct RegistryDependency<'a> { + name: Cow<'a, str>, + req: Cow<'a, str>, + features: Vec, + optional: bool, + default_features: bool, + target: Option>, + kind: Option>, +} + +pub trait RegistryData { + fn index_path(&self) -> &Filesystem; + fn load(&self, + _root: &Path, + path: &Path, + data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()>; + fn config(&mut self) -> CargoResult>; + fn update_index(&mut self) -> CargoResult<()>; + fn download(&mut self, + pkg: &PackageId, + checksum: &str) -> CargoResult; +} + +mod index; +mod remote; +mod local; + +fn short_name(id: &SourceId) -> String { + let hash = hex::short_hash(id); + let ident = id.url().host_str().unwrap_or("").to_string(); + format!("{}-{}", ident, hash) +} + +impl<'cfg> RegistrySource<'cfg> { + pub fn remote(source_id: &SourceId, + config: &'cfg Config) -> RegistrySource<'cfg> { + let name = short_name(source_id); + let ops = remote::RemoteRegistry::new(source_id, config, &name); + RegistrySource::new(source_id, config, &name, Box::new(ops), true) + } + + pub fn local(source_id: &SourceId, + path: &Path, + config: &'cfg Config) -> RegistrySource<'cfg> { + let name = short_name(source_id); + let ops = local::LocalRegistry::new(path, config, &name); + RegistrySource::new(source_id, config, &name, Box::new(ops), false) + } + + fn new(source_id: &SourceId, + config: &'cfg Config, + name: &str, + ops: Box, + index_locked: bool) -> RegistrySource<'cfg> { + RegistrySource { + src_path: config.registry_source_path().join(name), + config: config, + source_id: source_id.clone(), + updated: false, + index: index::RegistryIndex::new(source_id, + ops.index_path(), + config, + index_locked), + index_locked: index_locked, + ops: ops, + } + } + + /// Decode the configuration stored within the registry. + /// + /// This requires that the index has been at least checked out. + pub fn config(&mut self) -> CargoResult> { + self.ops.config() + } + + /// Unpacks a downloaded package into a location where it's ready to be + /// compiled. + /// + /// No action is taken if the source looks like it's already unpacked. + fn unpack_package(&self, + pkg: &PackageId, + tarball: &FileLock) + -> CargoResult { + let dst = self.src_path.join(&format!("{}-{}", pkg.name(), + pkg.version())); + dst.create_dir()?; + // Note that we've already got the `tarball` locked above, and that + // implies a lock on the unpacked destination as well, so this access + // via `into_path_unlocked` should be ok. + let dst = dst.into_path_unlocked(); + let ok = dst.join(".cargo-ok"); + if ok.exists() { + return Ok(dst) + } + + let gz = GzDecoder::new(tarball.file())?; + let mut tar = Archive::new(gz); + let prefix = dst.file_name().unwrap(); + let parent = dst.parent().unwrap(); + for entry in tar.entries()? { + let mut entry = entry.chain_err(|| "failed to iterate over archive")?; + let entry_path = entry.path() + .chain_err(|| "failed to read entry path")? + .into_owned(); + + // We're going to unpack this tarball into the global source + // directory, but we want to make sure that it doesn't accidentally + // (or maliciously) overwrite source code from other crates. 
Cargo + // itself should never generate a tarball that hits this error, and + // crates.io should also block uploads with these sorts of tarballs, + // but be extra sure by adding a check here as well. + if !entry_path.starts_with(prefix) { + return Err(format!("invalid tarball downloaded, contains \ + a file at {:?} which isn't under {:?}", + entry_path, prefix).into()) + } + + // Once that's verified, unpack the entry as usual. + entry.unpack_in(parent).chain_err(|| { + format!("failed to unpack entry at `{}`", entry_path.display()) + })?; + } + File::create(&ok)?; + Ok(dst.clone()) + } + + fn do_update(&mut self) -> CargoResult<()> { + self.ops.update_index()?; + let path = self.ops.index_path(); + self.index = index::RegistryIndex::new(&self.source_id, + path, + self.config, + self.index_locked); + Ok(()) + } +} + +impl<'cfg> Registry for RegistrySource<'cfg> { + fn query(&mut self, + dep: &Dependency, + f: &mut FnMut(Summary)) -> CargoResult<()> { + // If this is a precise dependency, then it came from a lockfile and in + // theory the registry is known to contain this version. If, however, we + // come back with no summaries, then our registry may need to be + // updated, so we fall back to performing a lazy update. + if dep.source_id().precise().is_some() && !self.updated { + let mut called = false; + self.index.query(dep, &mut *self.ops, &mut |s| { + called = true; + f(s); + })?; + if called { + return Ok(()) + } else { + self.do_update()?; + } + } + + self.index.query(dep, &mut *self.ops, f) + } + + fn supports_checksums(&self) -> bool { + true + } + + fn requires_precise(&self) -> bool { + false + } +} + +impl<'cfg> Source for RegistrySource<'cfg> { + fn source_id(&self) -> &SourceId { + &self.source_id + } + + fn update(&mut self) -> CargoResult<()> { + // If we have an imprecise version then we don't know what we're going + // to look for, so we always attempt to perform an update here. + // + // If we have a precise version, then we'll update lazily during the + // querying phase. Note that precise in this case is only + // `Some("locked")` as other `Some` values indicate a `cargo update + // --precise` request + if self.source_id.precise() != Some("locked") { + self.do_update()?; + } + Ok(()) + } + + fn download(&mut self, package: &PackageId) -> CargoResult { + let hash = self.index.hash(package, &mut *self.ops)?; + let path = self.ops.download(package, &hash)?; + let path = self.unpack_package(package, &path).chain_err(|| { + internal(format!("failed to unpack package `{}`", package)) + })?; + let mut src = PathSource::new(&path, &self.source_id, self.config); + src.update()?; + let pkg = src.download(package)?; + + // Unfortunately the index and the actual Cargo.toml in the index can + // differ due to historical Cargo bugs. To paper over these we trash the + // *summary* loaded from the Cargo.toml we just downloaded with the one + // we loaded from the index. 
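+        // Editor's note (not in the original source): the `expect` below
+        // relies on `download` only being called for package ids that were
+        // previously yielded by querying this same index, so the matching
+        // summary must exist.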
+ let summaries = self.index.summaries(package.name(), &mut *self.ops)?; + let summary = summaries.iter().map(|s| &s.0).find(|s| { + s.package_id() == package + }).expect("summary not found"); + let mut manifest = pkg.manifest().clone(); + manifest.set_summary(summary.clone()); + Ok(Package::new(manifest, pkg.manifest_path())) + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult { + Ok(pkg.package_id().version().to_string()) + } +} + +// TODO: this is pretty unfortunate, ideally we'd use `DeserializeSeed` which +// is intended for "deserializing with context" but that means we couldn't +// use `#[derive(Deserialize)]` on `RegistryPackage` unfortunately. +// +// I'm told, however, that https://github.com/serde-rs/serde/pull/909 will solve +// all our problems here. Until that lands this thread local is just a +// workaround in the meantime. +// +// If you're reading this and find this thread local funny, check to see if that +// PR is merged. If it is then let's ditch this thread local! +scoped_thread_local!(static DEFAULT_ID: SourceId); + +impl<'de> de::Deserialize<'de> for DependencyList { + fn deserialize(deserializer: D) -> Result + where D: de::Deserializer<'de>, + { + return deserializer.deserialize_seq(Visitor); + + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = DependencyList; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "a list of dependencies") + } + + fn visit_seq(self, mut seq: A) -> Result + where A: de::SeqAccess<'de>, + { + let mut ret = Vec::new(); + if let Some(size) = seq.size_hint() { + ret.reserve(size); + } + while let Some(element) = seq.next_element::()? { + ret.push(parse_registry_dependency(element).map_err(|e| { + de::Error::custom(e) + })?); + } + + Ok(DependencyList { inner: ret }) + } + } + } +} + +/// Converts an encoded dependency in the registry to a cargo dependency +fn parse_registry_dependency(dep: RegistryDependency) + -> CargoResult { + let RegistryDependency { + name, req, features, optional, default_features, target, kind + } = dep; + + let mut dep = DEFAULT_ID.with(|id| { + Dependency::parse_no_deprecated(&name, Some(&req), id) + })?; + let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") { + "dev" => Kind::Development, + "build" => Kind::Build, + _ => Kind::Normal, + }; + + let platform = match target { + Some(target) => Some(target.parse()?), + None => None, + }; + + // Unfortunately older versions of cargo and/or the registry ended up + // publishing lots of entries where the features array contained the + // empty feature, "", inside. This confuses the resolution process much + // later on and these features aren't actually valid, so filter them all + // out here. 
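+    // Editor's example (not from the original source): an index entry whose
+    // dependency lists `"features": ["", "serde"]` would otherwise carry the
+    // bogus empty feature; after this filter only `"serde"` remains.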
+ let features = features.into_iter().filter(|s| !s.is_empty()).collect(); + + dep.set_optional(optional) + .set_default_features(default_features) + .set_features(features) + .set_platform(platform) + .set_kind(kind); + Ok(dep) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/registry/remote.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/registry/remote.rs new file mode 100644 index 000000000..6704282b8 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/registry/remote.rs @@ -0,0 +1,260 @@ +use std::cell::{RefCell, Ref, Cell}; +use std::io::SeekFrom; +use std::io::prelude::*; +use std::mem; +use std::path::Path; + +use git2; +use hex::ToHex; +use serde_json; + +use core::{PackageId, SourceId}; +use ops; +use sources::git; +use sources::registry::{RegistryData, RegistryConfig, INDEX_LOCK}; +use util::network; +use util::{FileLock, Filesystem, LazyCell}; +use util::{Config, Sha256, ToUrl}; +use util::errors::{CargoErrorKind, CargoResult, CargoResultExt}; + +pub struct RemoteRegistry<'cfg> { + index_path: Filesystem, + cache_path: Filesystem, + source_id: SourceId, + config: &'cfg Config, + tree: RefCell>>, + repo: LazyCell, + head: Cell>, +} + +impl<'cfg> RemoteRegistry<'cfg> { + pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) + -> RemoteRegistry<'cfg> { + RemoteRegistry { + index_path: config.registry_index_path().join(name), + cache_path: config.registry_cache_path().join(name), + source_id: source_id.clone(), + config: config, + tree: RefCell::new(None), + repo: LazyCell::new(), + head: Cell::new(None), + } + } + + fn repo(&self) -> CargoResult<&git2::Repository> { + self.repo.get_or_try_init(|| { + let path = self.index_path.clone().into_path_unlocked(); + + // Fast path without a lock + if let Ok(repo) = git2::Repository::open(&path) { + return Ok(repo) + } + + // Ok, now we need to lock and try the whole thing over again. + let lock = self.index_path.open_rw(Path::new(INDEX_LOCK), + self.config, + "the registry index")?; + match git2::Repository::open(&path) { + Ok(repo) => Ok(repo), + Err(_) => { + let _ = lock.remove_siblings(); + + // Note that we'd actually prefer to use a bare repository + // here as we're not actually going to check anything out. + // All versions of Cargo, though, share the same CARGO_HOME, + // so for compatibility with older Cargo which *does* do + // checkouts we make sure to initialize a new full + // repository (not a bare one). + // + // We should change this to `init_bare` whenever we feel + // like enough time has passed or if we change the directory + // that the folder is located in, such as by changing the + // hash at the end of the directory. + Ok(git2::Repository::init(&path)?) + } + } + }) + } + + fn head(&self) -> CargoResult { + if self.head.get().is_none() { + let oid = self.repo()?.refname_to_id("refs/remotes/origin/master")?; + self.head.set(Some(oid)); + } + Ok(self.head.get().unwrap()) + } + + fn tree(&self) -> CargoResult> { + { + let tree = self.tree.borrow(); + if tree.is_some() { + return Ok(Ref::map(tree, |s| s.as_ref().unwrap())) + } + } + let repo = self.repo()?; + let commit = repo.find_commit(self.head()?)?; + let tree = commit.tree()?; + + // Unfortunately in libgit2 the tree objects look like they've got a + // reference to the repository object which means that a tree cannot + // outlive the repository that it came from. Here we want to cache this + // tree, though, so to accomplish this we transmute it to a static + // lifetime. 
+        //
+        // Note that we don't actually hand out the static lifetime, instead we
+        // only return a scoped one from this function. Additionally the repo
+        // we loaded from (above) lives as long as this object
+        // (`RemoteRegistry`) so we then just need to ensure that the tree is
+        // destroyed first in the destructor, hence the destructor on
+        // `RemoteRegistry` below.
+        let tree = unsafe {
+            mem::transmute::<git2::Tree, git2::Tree<'static>>(tree)
+        };
+        *self.tree.borrow_mut() = Some(tree);
+        Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
+    }
+}
+
+impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
+    fn index_path(&self) -> &Filesystem {
+        &self.index_path
+    }
+
+    fn load(&self,
+            _root: &Path,
+            path: &Path,
+            data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()> {
+        // Note that the index calls this method and the filesystem is locked
+        // in the index, so we don't need to worry about an `update_index`
+        // happening in a different process.
+        let repo = self.repo()?;
+        let tree = self.tree()?;
+        let entry = tree.get_path(path)?;
+        let object = entry.to_object(repo)?;
+        let blob = match object.as_blob() {
+            Some(blob) => blob,
+            None => bail!("path `{}` is not a blob in the git repo", path.display()),
+        };
+        data(blob.content())
+    }
+
+    fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+        self.repo()?; // create intermediate dirs and initialize the repo
+        let _lock = self.index_path.open_ro(Path::new(INDEX_LOCK),
+                                            self.config,
+                                            "the registry index")?;
+        let mut config = None;
+        self.load(Path::new(""), Path::new("config.json"), &mut |json| {
+            config = Some(serde_json::from_slice(json)?);
+            Ok(())
+        })?;
+        Ok(config)
+    }
+
+    fn update_index(&mut self) -> CargoResult<()> {
+        // Ensure that we'll actually be able to acquire an HTTP handle later on
+        // once we start trying to download crates. This will weed out any
+        // problems with `.cargo/config` configuration related to HTTP.
+        //
+        // This way if there's a problem the error gets printed before we even
+        // hit the index, which may not actually read this configuration.
+        ops::http_handle(self.config)?;
+
+        self.repo()?;
+        self.head.set(None);
+        *self.tree.borrow_mut() = None;
+        let _lock = self.index_path.open_rw(Path::new(INDEX_LOCK),
+                                            self.config,
+                                            "the registry index")?;
+        self.config.shell().status("Updating",
+                                   format!("registry `{}`", self.source_id.url()))?;
+
+        // git fetch origin master
+        let url = self.source_id.url();
+        let refspec = "refs/heads/master:refs/remotes/origin/master";
+        let repo = self.repo.borrow_mut().unwrap();
+        git::fetch(repo, url, refspec, self.config).chain_err(|| {
+            format!("failed to fetch `{}`", url)
+        })?;
+        Ok(())
+    }
+
+    fn download(&mut self, pkg: &PackageId, checksum: &str)
+                -> CargoResult<FileLock> {
+        let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
+        let path = Path::new(&filename);
+
+        // Attempt to open a read-only copy first to avoid an exclusive write
+        // lock and also work with read-only filesystems. Note that we check the
+        // length of the file like below to handle interrupted downloads.
+        //
+        // If this fails then we fall through to the exclusive path where we may
+        // have to redownload the file.
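+        // Editor's note (not from the original source): a zero-length cache
+        // file is treated as "not downloaded" by the length checks below,
+        // which is what makes an interrupted download safe to retry.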
+ if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) { + let meta = dst.file().metadata()?; + if meta.len() > 0 { + return Ok(dst) + } + } + let mut dst = self.cache_path.open_rw(path, self.config, &filename)?; + let meta = dst.file().metadata()?; + if meta.len() > 0 { + return Ok(dst) + } + self.config.shell().status("Downloading", pkg)?; + + let config = self.config()?.unwrap(); + let mut url = config.dl.to_url()?; + url.path_segments_mut().unwrap() + .push(pkg.name()) + .push(&pkg.version().to_string()) + .push("download"); + + // TODO: don't download into memory, but ensure that if we ctrl-c a + // download we should resume either from the start or the middle + // on the next time + let url = url.to_string(); + let mut handle = self.config.http()?.borrow_mut(); + handle.get(true)?; + handle.url(&url)?; + handle.follow_location(true)?; + let mut state = Sha256::new(); + let mut body = Vec::new(); + network::with_retry(self.config, || { + state = Sha256::new(); + body = Vec::new(); + { + let mut handle = handle.transfer(); + handle.write_function(|buf| { + state.update(buf); + body.extend_from_slice(buf); + Ok(buf.len()) + })?; + handle.perform()?; + } + let code = handle.response_code()?; + if code != 200 && code != 0 { + let url = handle.effective_url()?.unwrap_or(&url); + Err(CargoErrorKind::HttpNot200(code, url.to_string()).into()) + } else { + Ok(()) + } + })?; + + // Verify what we just downloaded + if state.finish().to_hex() != checksum { + bail!("failed to verify the checksum of `{}`", pkg) + } + + dst.write_all(&body)?; + dst.seek(SeekFrom::Start(0))?; + Ok(dst) + } +} + +impl<'cfg> Drop for RemoteRegistry<'cfg> { + fn drop(&mut self) { + // Just be sure to drop this before our other fields + self.tree.borrow_mut().take(); + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/sources/replaced.rs b/collector/compile-benchmarks/cargo/src/cargo/sources/replaced.rs new file mode 100644 index 000000000..5048f6186 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/sources/replaced.rs @@ -0,0 +1,75 @@ +use core::{Source, Registry, PackageId, Package, Dependency, Summary, SourceId}; +use util::errors::{CargoResult, CargoResultExt}; + +pub struct ReplacedSource<'cfg> { + to_replace: SourceId, + replace_with: SourceId, + inner: Box, +} + +impl<'cfg> ReplacedSource<'cfg> { + pub fn new(to_replace: &SourceId, + replace_with: &SourceId, + src: Box) -> ReplacedSource<'cfg> { + ReplacedSource { + to_replace: to_replace.clone(), + replace_with: replace_with.clone(), + inner: src, + } + } +} + +impl<'cfg> Registry for ReplacedSource<'cfg> { + fn query(&mut self, + dep: &Dependency, + f: &mut FnMut(Summary)) -> CargoResult<()> { + let (replace_with, to_replace) = (&self.replace_with, &self.to_replace); + let dep = dep.clone().map_source(to_replace, replace_with); + + self.inner.query(&dep, &mut |summary| { + f(summary.map_source(replace_with, to_replace)) + }).chain_err(|| { + format!("failed to query replaced source `{}`", + self.to_replace) + }) + } + + fn supports_checksums(&self) -> bool { + self.inner.supports_checksums() + } + + fn requires_precise(&self) -> bool { + self.inner.requires_precise() + } +} + +impl<'cfg> Source for ReplacedSource<'cfg> { + fn source_id(&self) -> &SourceId { + &self.to_replace + } + + fn update(&mut self) -> CargoResult<()> { + self.inner.update().chain_err(|| { + format!("failed to update replaced source `{}`", + self.to_replace) + }) + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + let id = 
id.with_source_id(&self.replace_with); + let pkg = self.inner.download(&id).chain_err(|| { + format!("failed to download replaced source `{}`", + self.to_replace) + })?; + Ok(pkg.map_source(&self.replace_with, &self.to_replace)) + } + + fn fingerprint(&self, id: &Package) -> CargoResult { + self.inner.fingerprint(id) + } + + fn verify(&self, id: &PackageId) -> CargoResult<()> { + let id = id.with_source_id(&self.replace_with); + self.inner.verify(&id) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/cfg.rs b/collector/compile-benchmarks/cargo/src/cargo/util/cfg.rs new file mode 100644 index 000000000..341b24d6d --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/cfg.rs @@ -0,0 +1,261 @@ +use std::str::{self, FromStr}; +use std::iter; +use std::fmt; + +use util::{CargoError, CargoResult}; + +#[derive(Clone, PartialEq, Debug)] +pub enum Cfg { + Name(String), + KeyPair(String, String), +} + +#[derive(Clone, PartialEq, Debug)] +pub enum CfgExpr { + Not(Box), + All(Vec), + Any(Vec), + Value(Cfg), +} + +#[derive(PartialEq)] +enum Token<'a> { + LeftParen, + RightParen, + Ident(&'a str), + Comma, + Equals, + String(&'a str), +} + +struct Tokenizer<'a> { + s: iter::Peekable>, + orig: &'a str, +} + +struct Parser<'a> { + t: iter::Peekable>, +} + +impl FromStr for Cfg { + type Err = CargoError; + + fn from_str(s: &str) -> CargoResult { + let mut p = Parser::new(s); + let e = p.cfg()?; + if p.t.next().is_some() { + bail!("malformed cfg value or key/value pair: `{}`", s) + } + Ok(e) + } +} + +impl fmt::Display for Cfg { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Cfg::Name(ref s) => s.fmt(f), + Cfg::KeyPair(ref k, ref v) => write!(f, "{} = \"{}\"", k, v), + } + } +} + +impl CfgExpr { + pub fn matches(&self, cfg: &[Cfg]) -> bool { + match *self { + CfgExpr::Not(ref e) => !e.matches(cfg), + CfgExpr::All(ref e) => e.iter().all(|e| e.matches(cfg)), + CfgExpr::Any(ref e) => e.iter().any(|e| e.matches(cfg)), + CfgExpr::Value(ref e) => cfg.contains(e), + } + } +} + +impl FromStr for CfgExpr { + type Err = CargoError; + + fn from_str(s: &str) -> CargoResult { + let mut p = Parser::new(s); + let e = p.expr()?; + if p.t.next().is_some() { + bail!("can only have one cfg-expression, consider using all() or \ + any() explicitly") + } + Ok(e) + } +} + +impl fmt::Display for CfgExpr { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + CfgExpr::Not(ref e) => write!(f, "not({})", e), + CfgExpr::All(ref e) => write!(f, "all({})", CommaSep(e)), + CfgExpr::Any(ref e) => write!(f, "any({})", CommaSep(e)), + CfgExpr::Value(ref e) => write!(f, "{}", e), + } + } +} + +struct CommaSep<'a, T: 'a>(&'a [T]); + +impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + for (i, v) in self.0.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}", v)?; + } + Ok(()) + } +} + +impl<'a> Parser<'a> { + fn new(s: &'a str) -> Parser<'a> { + Parser { + t: Tokenizer { + s: s.char_indices().peekable(), + orig: s, + }.peekable(), + } + } + + fn expr(&mut self) -> CargoResult { + match self.t.peek() { + Some(&Ok(Token::Ident(op @ "all"))) | + Some(&Ok(Token::Ident(op @ "any"))) => { + self.t.next(); + let mut e = Vec::new(); + self.eat(Token::LeftParen)?; + while !self.try(Token::RightParen) { + e.push(self.expr()?); + if !self.try(Token::Comma) { + self.eat(Token::RightParen)?; + break + } + } + if op == "all" { + Ok(CfgExpr::All(e)) + } else { + Ok(CfgExpr::Any(e)) + } 
+ } + Some(&Ok(Token::Ident("not"))) => { + self.t.next(); + self.eat(Token::LeftParen)?; + let e = self.expr()?; + self.eat(Token::RightParen)?; + Ok(CfgExpr::Not(Box::new(e))) + } + Some(&Ok(..)) => self.cfg().map(CfgExpr::Value), + Some(&Err(..)) => { + Err(self.t.next().unwrap().err().unwrap()) + } + None => bail!("expected start of a cfg expression, \ + found nothing"), + } + } + + fn cfg(&mut self) -> CargoResult { + match self.t.next() { + Some(Ok(Token::Ident(name))) => { + let e = if self.try(Token::Equals) { + let val = match self.t.next() { + Some(Ok(Token::String(s))) => s, + Some(Ok(t)) => bail!("expected a string, found {}", + t.classify()), + Some(Err(e)) => return Err(e), + None => bail!("expected a string, found nothing"), + }; + Cfg::KeyPair(name.to_string(), val.to_string()) + } else { + Cfg::Name(name.to_string()) + }; + Ok(e) + } + Some(Ok(t)) => bail!("expected identifier, found {}", t.classify()), + Some(Err(e)) => Err(e), + None => bail!("expected identifier, found nothing"), + } + } + + fn try(&mut self, token: Token<'a>) -> bool { + match self.t.peek() { + Some(&Ok(ref t)) if token == *t => {} + _ => return false, + } + self.t.next(); + true + } + + fn eat(&mut self, token: Token<'a>) -> CargoResult<()> { + match self.t.next() { + Some(Ok(ref t)) if token == *t => Ok(()), + Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), + t.classify()), + Some(Err(e)) => Err(e), + None => bail!("expected {}, but cfg expr ended", token.classify()), + } + } +} + +impl<'a> Iterator for Tokenizer<'a> { + type Item = CargoResult>; + + fn next(&mut self) -> Option>> { + loop { + match self.s.next() { + Some((_, ' ')) => {} + Some((_, '(')) => return Some(Ok(Token::LeftParen)), + Some((_, ')')) => return Some(Ok(Token::RightParen)), + Some((_, ',')) => return Some(Ok(Token::Comma)), + Some((_, '=')) => return Some(Ok(Token::Equals)), + Some((start, '"')) => { + while let Some((end, ch)) = self.s.next() { + if ch == '"' { + return Some(Ok(Token::String(&self.orig[start+1..end]))) + } + } + return Some(Err("unterminated string in cfg".into())) + } + Some((start, ch)) if is_ident_start(ch) => { + while let Some(&(end, ch)) = self.s.peek() { + if !is_ident_rest(ch) { + return Some(Ok(Token::Ident(&self.orig[start..end]))) + } else { + self.s.next(); + } + } + return Some(Ok(Token::Ident(&self.orig[start..]))) + } + Some((_, ch)) => { + return Some(Err(format!("unexpected character in \ + cfg `{}`, expected parens, \ + a comma, an identifier, or \ + a string", ch).into())) + } + None => return None + } + } + } +} + +fn is_ident_start(ch: char) -> bool { + ch == '_' || ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') +} + +fn is_ident_rest(ch: char) -> bool { + is_ident_start(ch) || ('0' <= ch && ch <= '9') +} + +impl<'a> Token<'a> { + fn classify(&self) -> &str { + match *self { + Token::LeftParen => "`(`", + Token::RightParen => "`)`", + Token::Ident(..) => "an identifier", + Token::Comma => "`,`", + Token::Equals => "`=`", + Token::String(..) 
=> "a string", + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/config.rs b/collector/compile-benchmarks/cargo/src/cargo/util/config.rs new file mode 100644 index 000000000..039993567 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/config.rs @@ -0,0 +1,882 @@ +use std::cell::{RefCell, RefMut}; +use std::collections::HashSet; +use std::collections::hash_map::Entry::{Occupied, Vacant}; +use std::collections::hash_map::HashMap; +use std::env; +use std::fmt; +use std::fs::{self, File}; +use std::io::SeekFrom; +use std::io::prelude::*; +use std::mem; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::sync::{Once, ONCE_INIT}; + +use curl::easy::Easy; +use jobserver; +use serde::{Serialize, Serializer}; +use toml; + +use core::shell::Verbosity; +use core::{Shell, CliUnstable}; +use ops; +use util::Rustc; +use util::errors::{CargoResult, CargoResultExt, CargoError, internal}; +use util::paths; +use util::toml as cargo_toml; +use util::{Filesystem, LazyCell}; + +use self::ConfigValue as CV; + +/// Configuration information for cargo. This is not specific to a build, it is information +/// relating to cargo itself. +/// +/// This struct implements `Default`: all fields can be inferred. +#[derive(Debug)] +pub struct Config { + /// The location of the users's 'home' directory. OS-dependent. + home_path: Filesystem, + /// Information about how to write messages to the shell + shell: RefCell, + /// Information on how to invoke the compiler (rustc) + rustc: LazyCell, + /// A collection of configuration options + values: LazyCell>, + /// The current working directory of cargo + cwd: PathBuf, + /// The location of the cargo executable (path to current process) + cargo_exe: LazyCell, + /// The location of the rustdoc executable + rustdoc: LazyCell, + /// Whether we are printing extra verbose messages + extra_verbose: bool, + /// `frozen` is set if we shouldn't access the network + frozen: bool, + /// `locked` is set if we should not update lock files + locked: bool, + /// A global static IPC control mechanism (used for managing parallel builds) + jobserver: Option, + /// Cli flags of the form "-Z something" + cli_flags: CliUnstable, + /// A handle on curl easy mode for http calls + easy: LazyCell>, +} + +impl Config { + pub fn new(shell: Shell, + cwd: PathBuf, + homedir: PathBuf) -> Config { + static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _; + static INIT: Once = ONCE_INIT; + + // This should be called early on in the process, so in theory the + // unsafety is ok here. (taken ownership of random fds) + INIT.call_once(|| unsafe { + if let Some(client) = jobserver::Client::from_env() { + GLOBAL_JOBSERVER = Box::into_raw(Box::new(client)); + } + }); + + Config { + home_path: Filesystem::new(homedir), + shell: RefCell::new(shell), + rustc: LazyCell::new(), + cwd: cwd, + values: LazyCell::new(), + cargo_exe: LazyCell::new(), + rustdoc: LazyCell::new(), + extra_verbose: false, + frozen: false, + locked: false, + jobserver: unsafe { + if GLOBAL_JOBSERVER.is_null() { + None + } else { + Some((*GLOBAL_JOBSERVER).clone()) + } + }, + cli_flags: CliUnstable::default(), + easy: LazyCell::new(), + } + } + + pub fn default() -> CargoResult { + let shell = Shell::new(); + let cwd = env::current_dir().chain_err(|| { + "couldn't get the current directory of the process" + })?; + let homedir = homedir(&cwd).ok_or_else(|| { + "Cargo couldn't find your home directory. \ + This probably means that $HOME was not set." 
+ })?; + Ok(Config::new(shell, cwd, homedir)) + } + + /// The user's cargo home directory (OS-dependent) + pub fn home(&self) -> &Filesystem { &self.home_path } + + /// The cargo git directory (`/git`) + pub fn git_path(&self) -> Filesystem { + self.home_path.join("git") + } + + /// The cargo registry index directory (`/registry/index`) + pub fn registry_index_path(&self) -> Filesystem { + self.home_path.join("registry").join("index") + } + + /// The cargo registry cache directory (`/registry/path`) + pub fn registry_cache_path(&self) -> Filesystem { + self.home_path.join("registry").join("cache") + } + + /// The cargo registry source directory (`/registry/src`) + pub fn registry_source_path(&self) -> Filesystem { + self.home_path.join("registry").join("src") + } + + /// Get a reference to the shell, for e.g. writing error messages + pub fn shell(&self) -> RefMut { + self.shell.borrow_mut() + } + + /// Get the path to the `rustdoc` executable + pub fn rustdoc(&self) -> CargoResult<&Path> { + self.rustdoc.get_or_try_init(|| self.get_tool("rustdoc")).map(AsRef::as_ref) + } + + /// Get the path to the `rustc` executable + pub fn rustc(&self) -> CargoResult<&Rustc> { + self.rustc.get_or_try_init(|| Rustc::new(self.get_tool("rustc")?, + self.maybe_get_tool("rustc_wrapper")?)) + } + + /// Get the path to the `cargo` executable + pub fn cargo_exe(&self) -> CargoResult<&Path> { + self.cargo_exe.get_or_try_init(|| + env::current_exe().and_then(|path| path.canonicalize()) + .chain_err(|| "couldn't get the path to cargo executable") + ).map(AsRef::as_ref) + } + + pub fn values(&self) -> CargoResult<&HashMap> { + self.values.get_or_try_init(|| self.load_values()) + } + + pub fn set_values(&self, values: HashMap) -> CargoResult<()> { + if self.values.borrow().is_some() { + return Err("Config values already found".into()); + } + match self.values.fill(values) { + Ok(()) => Ok(()), + Err(_) => Err("Could not fill values".into()), + } + } + + pub fn cwd(&self) -> &Path { &self.cwd } + + pub fn target_dir(&self) -> CargoResult> { + if let Some(dir) = env::var_os("CARGO_TARGET_DIR") { + Ok(Some(Filesystem::new(self.cwd.join(dir)))) + } else if let Some(val) = self.get_path("build.target-dir")? { + let val = self.cwd.join(val.val); + Ok(Some(Filesystem::new(val))) + } else { + Ok(None) + } + } + + fn get(&self, key: &str) -> CargoResult> { + let vals = self.values()?; + let mut parts = key.split('.').enumerate(); + let mut val = match vals.get(parts.next().unwrap().1) { + Some(val) => val, + None => return Ok(None), + }; + for (i, part) in parts { + match *val { + CV::Table(ref map, _) => { + val = match map.get(part) { + Some(val) => val, + None => return Ok(None), + } + } + CV::Integer(_, ref path) | + CV::String(_, ref path) | + CV::List(_, ref path) | + CV::Boolean(_, ref path) => { + let idx = key.split('.').take(i) + .fold(0, |n, s| n + s.len()) + i - 1; + let key_so_far = &key[..idx]; + bail!("expected table for configuration key `{}`, \ + but found {} in {}", + key_so_far, val.desc(), path.display()) + } + } + } + Ok(Some(val.clone())) + } + + fn get_env(&self, key: &str) -> CargoResult>> + where CargoError: From + { + let key = key.replace(".", "_") + .replace("-", "_") + .chars() + .flat_map(|c| c.to_uppercase()) + .collect::(); + match env::var(&format!("CARGO_{}", key)) { + Ok(value) => { + Ok(Some(Value { + val: value.parse()?, + definition: Definition::Environment, + })) + } + Err(..) 
=> Ok(None), + } + } + + pub fn get_string(&self, key: &str) -> CargoResult>> { + if let Some(v) = self.get_env(key)? { + return Ok(Some(v)) + } + match self.get(key)? { + Some(CV::String(i, path)) => { + Ok(Some(Value { + val: i, + definition: Definition::Path(path), + })) + } + Some(val) => self.expected("string", key, val), + None => Ok(None), + } + } + + pub fn get_bool(&self, key: &str) -> CargoResult>> { + if let Some(v) = self.get_env(key)? { + return Ok(Some(v)) + } + match self.get(key)? { + Some(CV::Boolean(b, path)) => { + Ok(Some(Value { + val: b, + definition: Definition::Path(path), + })) + } + Some(val) => self.expected("bool", key, val), + None => Ok(None), + } + } + + fn string_to_path(&self, value: String, definition: &Definition) -> PathBuf { + let is_path = value.contains('/') || + (cfg!(windows) && value.contains('\\')); + if is_path { + definition.root(self).join(value) + } else { + // A pathless name + PathBuf::from(value) + } + } + + pub fn get_path(&self, key: &str) -> CargoResult>> { + if let Some(val) = self.get_string(key)? { + Ok(Some(Value { + val: self.string_to_path(val.val, &val.definition), + definition: val.definition + })) + } else { + Ok(None) + } + } + + pub fn get_path_and_args(&self, key: &str) + -> CargoResult)>>> { + if let Some(mut val) = self.get_list_or_split_string(key)? { + if !val.val.is_empty() { + return Ok(Some(Value { + val: (self.string_to_path(val.val.remove(0), &val.definition), val.val), + definition: val.definition + })); + } + } + Ok(None) + } + + pub fn get_list(&self, key: &str) + -> CargoResult>>> { + match self.get(key)? { + Some(CV::List(i, path)) => { + Ok(Some(Value { + val: i, + definition: Definition::Path(path), + })) + } + Some(val) => self.expected("list", key, val), + None => Ok(None), + } + } + + pub fn get_list_or_split_string(&self, key: &str) + -> CargoResult>>> { + match self.get_env::(key) { + Ok(Some(value)) => + return Ok(Some(Value { + val: value.val.split(' ').map(str::to_string).collect(), + definition: value.definition + })), + Err(err) => return Err(err), + Ok(None) => (), + } + + match self.get(key)? { + Some(CV::List(i, path)) => { + Ok(Some(Value { + val: i.into_iter().map(|(s, _)| s).collect(), + definition: Definition::Path(path), + })) + } + Some(CV::String(i, path)) => { + Ok(Some(Value { + val: i.split(' ').map(str::to_string).collect(), + definition: Definition::Path(path), + })) + } + Some(val) => self.expected("list or string", key, val), + None => Ok(None), + } + } + + pub fn get_table(&self, key: &str) + -> CargoResult>>> { + match self.get(key)? { + Some(CV::Table(i, path)) => { + Ok(Some(Value { + val: i, + definition: Definition::Path(path), + })) + } + Some(val) => self.expected("table", key, val), + None => Ok(None), + } + } + + pub fn get_i64(&self, key: &str) -> CargoResult>> { + if let Some(v) = self.get_env(key)? { + return Ok(Some(v)) + } + match self.get(key)? { + Some(CV::Integer(i, path)) => { + Ok(Some(Value { + val: i, + definition: Definition::Path(path), + })) + } + Some(val) => self.expected("integer", key, val), + None => Ok(None), + } + } + + pub fn net_retry(&self) -> CargoResult { + match self.get_i64("net.retry")? 
{ + Some(v) => { + let value = v.val; + if value < 0 { + bail!("net.retry must be positive, but found {} in {}", + v.val, v.definition) + } else { + Ok(value) + } + } + None => Ok(2), + } + } + + pub fn expected(&self, ty: &str, key: &str, val: CV) -> CargoResult { + val.expected(ty, key).map_err(|e| { + format!("invalid configuration for key `{}`\n{}", key, e).into() + }) + } + + pub fn configure(&mut self, + verbose: u32, + quiet: Option, + color: &Option, + frozen: bool, + locked: bool, + unstable_flags: &[String]) -> CargoResult<()> { + let extra_verbose = verbose >= 2; + let verbose = if verbose == 0 {None} else {Some(true)}; + + // Ignore errors in the configuration files. + let cfg_verbose = self.get_bool("term.verbose").unwrap_or(None).map(|v| v.val); + let cfg_color = self.get_string("term.color").unwrap_or(None).map(|v| v.val); + + let color = color.as_ref().or_else(|| cfg_color.as_ref()); + + let verbosity = match (verbose, cfg_verbose, quiet) { + (Some(true), _, None) | + (None, Some(true), None) => Verbosity::Verbose, + + // command line takes precedence over configuration, so ignore the + // configuration. + (None, _, Some(true)) => Verbosity::Quiet, + + // Can't pass both at the same time on the command line regardless + // of configuration. + (Some(true), _, Some(true)) => { + bail!("cannot set both --verbose and --quiet"); + } + + // Can't actually get `Some(false)` as a value from the command + // line, so just ignore them here to appease exhaustiveness checking + // in match statements. + (Some(false), _, _) | + (_, _, Some(false)) | + + (None, Some(false), None) | + (None, None, None) => Verbosity::Normal, + }; + + self.shell().set_verbosity(verbosity); + self.shell().set_color_choice(color.map(|s| &s[..]))?; + self.extra_verbose = extra_verbose; + self.frozen = frozen; + self.locked = locked; + self.cli_flags.parse(unstable_flags)?; + + Ok(()) + } + + pub fn cli_unstable(&self) -> &CliUnstable { + &self.cli_flags + } + + pub fn extra_verbose(&self) -> bool { + self.extra_verbose + } + + pub fn network_allowed(&self) -> bool { + !self.frozen + } + + pub fn lock_update_allowed(&self) -> bool { + !self.frozen && !self.locked + } + + /// Loads configuration from the filesystem + pub fn load_values(&self) -> CargoResult> { + let mut cfg = CV::Table(HashMap::new(), PathBuf::from(".")); + + walk_tree(&self.cwd, |path| { + let mut contents = String::new(); + let mut file = File::open(&path)?; + file.read_to_string(&mut contents).chain_err(|| { + format!("failed to read configuration file `{}`", + path.display()) + })?; + let toml = cargo_toml::parse(&contents, + path, + self).chain_err(|| { + format!("could not parse TOML configuration in `{}`", + path.display()) + })?; + let value = CV::from_toml(path, toml).chain_err(|| { + format!("failed to load TOML configuration from `{}`", + path.display()) + })?; + cfg.merge(value).chain_err(|| { + format!("failed to merge configuration at `{}`", path.display()) + })?; + Ok(()) + }).chain_err(|| "Couldn't load Cargo configuration")?; + + self.load_credentials(&mut cfg)?; + match cfg { + CV::Table(map, _) => Ok(map), + _ => unreachable!(), + } + } + + /// Loads credentials config from the credentials file into the ConfigValue object, if present. 
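///
/// (Editor's sketch, not in the vendored source:) the file lives at
/// `$CARGO_HOME/credentials` and its top-level keys are merged into the
/// `registry` table, so a file containing only `token = "..."` behaves
/// as if the user had written a `[registry]` section in a regular
/// `.cargo/config`; `save_credentials` below writes exactly that shape,
/// restricting the file to mode 0o600 on Unix.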
+ fn load_credentials(&self, cfg: &mut ConfigValue) -> CargoResult<()> { + let home_path = self.home_path.clone().into_path_unlocked(); + let credentials = home_path.join("credentials"); + if !fs::metadata(&credentials).is_ok() { + return Ok(()); + } + + let mut contents = String::new(); + let mut file = File::open(&credentials)?; + file.read_to_string(&mut contents).chain_err(|| { + format!("failed to read configuration file `{}`", credentials.display()) + })?; + + let toml = cargo_toml::parse(&contents, + &credentials, + self).chain_err(|| { + format!("could not parse TOML configuration in `{}`", credentials.display()) + })?; + + let value = CV::from_toml(&credentials, toml).chain_err(|| { + format!("failed to load TOML configuration from `{}`", credentials.display()) + })?; + + let cfg = match *cfg { + CV::Table(ref mut map, _) => map, + _ => unreachable!(), + }; + + let registry = cfg.entry("registry".into()) + .or_insert_with(|| CV::Table(HashMap::new(), PathBuf::from("."))); + + match (registry, value) { + (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => { + // Take ownership of `new` by swapping it with an empty hashmap, so we can move + // into an iterator. + let new = mem::replace(new, HashMap::new()); + for (key, value) in new { + old.insert(key, value); + } + } + _ => unreachable!(), + } + + Ok(()) + } + + /// Look for a path for `tool` in an environment variable or config path, but return `None` + /// if it's not present. + fn maybe_get_tool(&self, tool: &str) -> CargoResult> { + let var = tool.chars().flat_map(|c| c.to_uppercase()).collect::(); + if let Some(tool_path) = env::var_os(&var) { + return Ok(Some(PathBuf::from(tool_path))); + } + + let var = format!("build.{}", tool); + if let Some(tool_path) = self.get_path(&var)? { + return Ok(Some(tool_path.val)); + } + + Ok(None) + } + + /// Look for a path for `tool` in an environment variable or config path, defaulting to `tool` + /// as a path. 
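///
/// A hypothetical illustration (not in the vendored source): for
/// `tool = "rustdoc"` the lookup order is the `RUSTDOC` environment
/// variable, then the `build.rustdoc` config key, then the bare name:
///
/// ```ignore
/// // env::set_var("RUSTDOC", "/opt/rustdoc");
/// let path = config.get_tool("rustdoc")?; // -> PathBuf::from("/opt/rustdoc")
/// // With neither the env var nor the config key set, this yields
/// // PathBuf::from("rustdoc"), resolved via PATH when the process is spawned.
/// ```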
+ fn get_tool(&self, tool: &str) -> CargoResult { + self.maybe_get_tool(tool) + .map(|t| t.unwrap_or_else(|| PathBuf::from(tool))) + } + + pub fn jobserver_from_env(&self) -> Option<&jobserver::Client> { + self.jobserver.as_ref() + } + + pub fn http(&self) -> CargoResult<&RefCell> { + self.easy.get_or_try_init(|| { + ops::http_handle(self).map(RefCell::new) + }) + } +} + +#[derive(Eq, PartialEq, Clone, Copy)] +pub enum Location { + Project, + Global +} + +#[derive(Eq,PartialEq,Clone,Deserialize)] +pub enum ConfigValue { + Integer(i64, PathBuf), + String(String, PathBuf), + List(Vec<(String, PathBuf)>, PathBuf), + Table(HashMap, PathBuf), + Boolean(bool, PathBuf), +} + +pub struct Value { + pub val: T, + pub definition: Definition, +} + +pub enum Definition { + Path(PathBuf), + Environment, +} + +impl fmt::Debug for ConfigValue { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + CV::Integer(i, ref path) => write!(f, "{} (from {})", i, + path.display()), + CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, + path.display()), + CV::String(ref s, ref path) => write!(f, "{} (from {})", s, + path.display()), + CV::List(ref list, ref path) => { + write!(f, "[")?; + for (i, &(ref s, ref path)) in list.iter().enumerate() { + if i > 0 { write!(f, ", ")?; } + write!(f, "{} (from {})", s, path.display())?; + } + write!(f, "] (from {})", path.display()) + } + CV::Table(ref table, _) => write!(f, "{:?}", table), + } + } +} + +impl Serialize for ConfigValue { + fn serialize(&self, s: S) -> Result { + match *self { + CV::String(ref string, _) => string.serialize(s), + CV::List(ref list, _) => { + let list: Vec<&String> = list.iter().map(|s| &s.0).collect(); + list.serialize(s) + } + CV::Table(ref table, _) => table.serialize(s), + CV::Boolean(b, _) => b.serialize(s), + CV::Integer(i, _) => i.serialize(s), + } + } +} + +impl ConfigValue { + fn from_toml(path: &Path, toml: toml::Value) -> CargoResult { + match toml { + toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())), + toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())), + toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())), + toml::Value::Array(val) => { + Ok(CV::List(val.into_iter().map(|toml| { + match toml { + toml::Value::String(val) => Ok((val, path.to_path_buf())), + v => Err(format!("expected string but found {} \ + in list", v.type_str()).into()), + } + }).collect::>()?, path.to_path_buf())) + } + toml::Value::Table(val) => { + Ok(CV::Table(val.into_iter().map(|(key, value)| { + let value = CV::from_toml(path, value).chain_err(|| { + format!("failed to parse key `{}`", key) + })?; + Ok((key, value)) + }).collect::>()?, path.to_path_buf())) + } + v => bail!("found TOML configuration value of unknown type `{}`", + v.type_str()), + } + } + + fn into_toml(self) -> toml::Value { + match self { + CV::Boolean(s, _) => toml::Value::Boolean(s), + CV::String(s, _) => toml::Value::String(s), + CV::Integer(i, _) => toml::Value::Integer(i), + CV::List(l, _) => toml::Value::Array(l + .into_iter() + .map(|(s, _)| toml::Value::String(s)) + .collect()), + CV::Table(l, _) => toml::Value::Table(l.into_iter() + .map(|(k, v)| (k, v.into_toml())) + .collect()), + } + } + + fn merge(&mut self, from: ConfigValue) -> CargoResult<()> { + match (self, from) { + (&mut CV::String(..), CV::String(..)) | + (&mut CV::Integer(..), CV::Integer(..)) | + (&mut CV::Boolean(..), CV::Boolean(..)) => {} + (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => { + let new = mem::replace(new, Vec::new()); 
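// (Editor's note, not in the vendored source:) the `extend` below makes
// list values additive across config files: entries from the file read
// first (nearest the cwd, see `walk_tree` below) keep their position and
// the later file's entries are appended. The scalar arms above keep the
// first-read value on collision, and the table arm below merges
// recursively.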
+ old.extend(new.into_iter()); + } + (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => { + let new = mem::replace(new, HashMap::new()); + for (key, value) in new { + match old.entry(key.clone()) { + Occupied(mut entry) => { + let path = value.definition_path().to_path_buf(); + let entry = entry.get_mut(); + entry.merge(value).chain_err(|| { + format!("failed to merge key `{}` between \ + files:\n \ + file 1: {}\n \ + file 2: {}", + key, + entry.definition_path().display(), + path.display()) + + })?; + } + Vacant(entry) => { entry.insert(value); } + }; + } + } + (expected, found) => { + return Err(internal(format!("expected {}, but found {}", + expected.desc(), found.desc()))) + } + } + + Ok(()) + } + + pub fn i64(&self, key: &str) -> CargoResult<(i64, &Path)> { + match *self { + CV::Integer(i, ref p) => Ok((i, p)), + _ => self.expected("integer", key), + } + } + + pub fn string(&self, key: &str) -> CargoResult<(&str, &Path)> { + match *self { + CV::String(ref s, ref p) => Ok((s, p)), + _ => self.expected("string", key), + } + } + + pub fn table(&self, key: &str) + -> CargoResult<(&HashMap, &Path)> { + match *self { + CV::Table(ref table, ref p) => Ok((table, p)), + _ => self.expected("table", key), + } + } + + pub fn list(&self, key: &str) -> CargoResult<&[(String, PathBuf)]> { + match *self { + CV::List(ref list, _) => Ok(list), + _ => self.expected("list", key), + } + } + + pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Path)> { + match *self { + CV::Boolean(b, ref p) => Ok((b, p)), + _ => self.expected("bool", key), + } + } + + pub fn desc(&self) -> &'static str { + match *self { + CV::Table(..) => "table", + CV::List(..) => "array", + CV::String(..) => "string", + CV::Boolean(..) => "boolean", + CV::Integer(..) => "integer", + } + } + + pub fn definition_path(&self) -> &Path { + match *self { + CV::Boolean(_, ref p) | + CV::Integer(_, ref p) | + CV::String(_, ref p) | + CV::List(_, ref p) | + CV::Table(_, ref p) => p + } + } + + fn expected(&self, wanted: &str, key: &str) -> CargoResult { + Err(format!("expected a {}, but found a {} for `{}` in {}", + wanted, self.desc(), key, + self.definition_path().display()).into()) + } +} + +impl Definition { + pub fn root<'a>(&'a self, config: &'a Config) -> &'a Path { + match *self { + Definition::Path(ref p) => p.parent().unwrap().parent().unwrap(), + Definition::Environment => config.cwd(), + } + } +} + +impl fmt::Display for Definition { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Definition::Path(ref p) => p.display().fmt(f), + Definition::Environment => "the environment".fmt(f), + } + } +} + +pub fn homedir(cwd: &Path) -> Option { + ::home::cargo_home_with_cwd(cwd).ok() +} + +fn walk_tree(pwd: &Path, mut walk: F) -> CargoResult<()> + where F: FnMut(&Path) -> CargoResult<()> +{ + let mut stash: HashSet = HashSet::new(); + + for current in paths::ancestors(pwd) { + let possible = current.join(".cargo").join("config"); + if fs::metadata(&possible).is_ok() { + walk(&possible)?; + stash.insert(possible); + } + } + + // Once we're done, also be sure to walk the home directory even if it's not + // in our history to be sure we pick up that standard location for + // information. + let home = homedir(pwd).ok_or_else(|| { + CargoError::from("Cargo couldn't find your home directory. 
\ + This probably means that $HOME was not set.") + })?; + let config = home.join("config"); + if !stash.contains(&config) && fs::metadata(&config).is_ok() { + walk(&config)?; + } + + Ok(()) +} + +pub fn save_credentials(cfg: &Config, + token: String) -> CargoResult<()> { + let mut file = { + cfg.home_path.create_dir()?; + cfg.home_path.open_rw(Path::new("credentials"), cfg, + "credentials' config file")? + }; + + let mut contents = String::new(); + file.read_to_string(&mut contents).chain_err(|| { + format!("failed to read configuration file `{}`", + file.path().display()) + })?; + let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?; + toml.as_table_mut() + .unwrap() + .insert("token".to_string(), + ConfigValue::String(token, file.path().to_path_buf()).into_toml()); + + let contents = toml.to_string(); + file.seek(SeekFrom::Start(0))?; + file.write_all(contents.as_bytes())?; + file.file().set_len(contents.len() as u64)?; + set_permissions(file.file(), 0o600)?; + + return Ok(()); + + #[cfg(unix)] + fn set_permissions(file: & File, mode: u32) -> CargoResult<()> { + use std::os::unix::fs::PermissionsExt; + + let mut perms = file.metadata()?.permissions(); + perms.set_mode(mode); + file.set_permissions(perms)?; + Ok(()) + } + + #[cfg(not(unix))] + #[allow(unused)] + fn set_permissions(file: & File, mode: u32) -> CargoResult<()> { + Ok(()) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/dependency_queue.rs b/collector/compile-benchmarks/cargo/src/cargo/util/dependency_queue.rs new file mode 100644 index 000000000..efe3cba9d --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/dependency_queue.rs @@ -0,0 +1,144 @@ +//! A graph-like structure used to represent a set of dependencies and in what +//! order they should be built. +//! +//! This structure is used to store the dependency graph and dynamically update +//! it to figure out when a dependency should be built. + +use std::collections::hash_map::Entry::{Occupied, Vacant}; +use std::collections::{HashMap, HashSet}; +use std::hash::Hash; + +pub use self::Freshness::{Fresh, Dirty}; + +#[derive(Debug)] +pub struct DependencyQueue { + /// A list of all known keys to build. + /// + /// The value of the hash map is list of dependencies which still need to be + /// built before the package can be built. Note that the set is dynamically + /// updated as more dependencies are built. + dep_map: HashMap, V)>, + + /// A reverse mapping of a package to all packages that depend on that + /// package. + /// + /// This map is statically known and does not get updated throughout the + /// lifecycle of the DependencyQueue. + reverse_dep_map: HashMap>, + + /// A set of dirty packages. + /// + /// Packages may become dirty over time if their dependencies are rebuilt. + dirty: HashSet, + + /// The packages which are currently being built, waiting for a call to + /// `finish`. + pending: HashSet, +} + +/// Indication of the freshness of a package. +/// +/// A fresh package does not necessarily need to be rebuilt (unless a dependency +/// was also rebuilt), and a dirty package must always be rebuilt. +#[derive(PartialEq, Eq, Debug, Clone, Copy)] +pub enum Freshness { + Fresh, + Dirty, +} + +impl Freshness { + pub fn combine(&self, other: Freshness) -> Freshness { + match *self { Fresh => other, Dirty => Dirty } + } +} + +impl Default for DependencyQueue { + fn default() -> DependencyQueue { + DependencyQueue::new() + } +} + +impl DependencyQueue { + /// Creates a new dependency queue with 0 packages. 
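///
/// A hypothetical usage sketch (not in the vendored source), assuming
/// the module's `Fresh` re-export is in scope:
///
/// ```ignore
/// let mut q = DependencyQueue::new();
/// q.queue(Fresh, "a", (), &[]);    // "a" has no dependencies
/// q.queue(Fresh, "b", (), &["a"]); // "b" waits on "a"
///
/// let (_, key, _) = q.dequeue().unwrap();
/// assert_eq!(key, "a");            // only "a" is ready to build
/// assert!(q.dequeue().is_none());  // "b" is still blocked on "a"
/// q.finish(&"a", Fresh);           // marks "a" built, unblocking "b"
/// assert_eq!(q.dequeue().unwrap().1, "b");
/// ```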
+ pub fn new() -> DependencyQueue { + DependencyQueue { + dep_map: HashMap::new(), + reverse_dep_map: HashMap::new(), + dirty: HashSet::new(), + pending: HashSet::new(), + } + } + + /// Adds a new package to this dependency queue. + /// + /// It is assumed that any dependencies of this package will eventually also + /// be added to the dependency queue. + pub fn queue(&mut self, + fresh: Freshness, + key: K, + value: V, + dependencies: &[K]) -> &mut V { + let slot = match self.dep_map.entry(key.clone()) { + Occupied(v) => return &mut v.into_mut().1, + Vacant(v) => v, + }; + + if fresh == Dirty { + self.dirty.insert(key.clone()); + } + + let mut my_dependencies = HashSet::new(); + for dep in dependencies { + my_dependencies.insert(dep.clone()); + let rev = self.reverse_dep_map.entry(dep.clone()) + .or_insert_with(HashSet::new); + rev.insert(key.clone()); + } + &mut slot.insert((my_dependencies, value)).1 + } + + /// Dequeues a package that is ready to be built. + /// + /// A package is ready to be built when it has 0 un-built dependencies. If + /// `None` is returned then no packages are ready to be built. + pub fn dequeue(&mut self) -> Option<(Freshness, K, V)> { + let key = match self.dep_map.iter() + .find(|&(_, &(ref deps, _))| deps.is_empty()) + .map(|(key, _)| key.clone()) { + Some(key) => key, + None => return None + }; + let (_, data) = self.dep_map.remove(&key).unwrap(); + let fresh = if self.dirty.contains(&key) {Dirty} else {Fresh}; + self.pending.insert(key.clone()); + Some((fresh, key, data)) + } + + /// Returns whether there are remaining packages to be built. + pub fn is_empty(&self) -> bool { + self.dep_map.is_empty() && self.pending.is_empty() + } + + /// Returns the number of remaining packages to be built. + pub fn len(&self) -> usize { + self.dep_map.len() + self.pending.len() + } + + /// Indicate that a package has been built. + /// + /// This function will update the dependency queue with this information, + /// possibly allowing the next invocation of `dequeue` to return a package. + pub fn finish(&mut self, key: &K, fresh: Freshness) { + assert!(self.pending.remove(key)); + let reverse_deps = match self.reverse_dep_map.get(key) { + Some(deps) => deps, + None => return, + }; + for dep in reverse_deps.iter() { + if fresh == Dirty { + self.dirty.insert(dep.clone()); + } + assert!(self.dep_map.get_mut(dep).unwrap().0.remove(key)); + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/errors.rs b/collector/compile-benchmarks/cargo/src/cargo/util/errors.rs new file mode 100644 index 000000000..70c501719 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/errors.rs @@ -0,0 +1,288 @@ +#![allow(unknown_lints)] + +use std::error::Error; +use std::fmt; +use std::io; +use std::num; +use std::process::{Output, ExitStatus}; +use std::str; +use std::string; + +use core::TargetKind; + +use curl; +use git2; +use semver; +use serde_json; +use toml; +use registry; +use ignore; + +error_chain! 
{ + types { + CargoError, CargoErrorKind, CargoResultExt, CargoResult; + } + + links { + CrateRegistry(registry::Error, registry::ErrorKind); + } + + foreign_links { + ParseSemver(semver::ReqParseError); + Semver(semver::SemVerError); + Ignore(ignore::Error); + Io(io::Error); + SerdeJson(serde_json::Error); + TomlSer(toml::ser::Error); + TomlDe(toml::de::Error); + ParseInt(num::ParseIntError); + ParseBool(str::ParseBoolError); + Parse(string::ParseError); + Git(git2::Error); + Curl(curl::Error); + } + + errors { + Internal(err: Box) { + description(err.description()) + display("{}", *err) + } + ProcessErrorKind(proc_err: ProcessError) { + description(&proc_err.desc) + display("{}", &proc_err.desc) + } + CargoTestErrorKind(test_err: CargoTestError) { + description(&test_err.desc) + display("{}", &test_err.desc) + } + HttpNot200(code: u32, url: String) { + description("failed to get a 200 response") + display("failed to get 200 response from `{}`, got {}", url, code) + } + } +} + +impl CargoError { + pub fn into_internal(self) -> Self { + CargoError(CargoErrorKind::Internal(Box::new(self.0)), self.1) + } + + fn is_human(&self) -> bool { + match self.0 { + CargoErrorKind::Msg(_) | + CargoErrorKind::TomlSer(_) | + CargoErrorKind::TomlDe(_) | + CargoErrorKind::Curl(_) | + CargoErrorKind::HttpNot200(..) | + CargoErrorKind::ProcessErrorKind(_) | + CargoErrorKind::CrateRegistry(_) => true, + CargoErrorKind::ParseSemver(_) | + CargoErrorKind::Semver(_) | + CargoErrorKind::Ignore(_) | + CargoErrorKind::Io(_) | + CargoErrorKind::SerdeJson(_) | + CargoErrorKind::ParseInt(_) | + CargoErrorKind::ParseBool(_) | + CargoErrorKind::Parse(_) | + CargoErrorKind::Git(_) | + CargoErrorKind::Internal(_) | + CargoErrorKind::CargoTestErrorKind(_) | + CargoErrorKind::__Nonexhaustive { .. } => false + } + } +} + + +// ============================================================================= +// Process errors +#[derive(Debug)] +pub struct ProcessError { + pub desc: String, + pub exit: Option, + pub output: Option, +} + +// ============================================================================= +// Cargo test errors. 
+ +/// Error when testcases fail +#[derive(Debug)] +pub struct CargoTestError { + pub test: Test, + pub desc: String, + pub exit: Option, + pub causes: Vec, +} + +#[derive(Debug)] +pub enum Test { + Multiple, + Doc, + UnitTest(TargetKind, String) +} + +impl CargoTestError { + pub fn new(test: Test, errors: Vec) -> Self { + if errors.is_empty() { + panic!("Cannot create CargoTestError from empty Vec") + } + let desc = errors.iter().map(|error| error.desc.clone()) + .collect::>() + .join("\n"); + CargoTestError { + test: test, + desc: desc, + exit: errors[0].exit, + causes: errors, + } + } + + pub fn hint(&self) -> String { + match self.test { + Test::UnitTest(ref kind, ref name) => { + match *kind { + TargetKind::Bench => format!("test failed, to rerun pass '--bench {}'", name), + TargetKind::Bin => format!("test failed, to rerun pass '--bin {}'", name), + TargetKind::Lib(_) => "test failed, to rerun pass '--lib'".into(), + TargetKind::Test => format!("test failed, to rerun pass '--test {}'", name), + TargetKind::ExampleBin | TargetKind::ExampleLib(_) => + format!("test failed, to rerun pass '--example {}", name), + _ => "test failed.".into() + } + }, + Test::Doc => "test failed, to rerun pass '--doc'".into(), + _ => "test failed.".into() + } + } +} + +// ============================================================================= +// CLI errors + +pub type CliResult = Result<(), CliError>; + +#[derive(Debug)] +pub struct CliError { + pub error: Option, + pub unknown: bool, + pub exit_code: i32 +} + +impl Error for CliError { + fn description(&self) -> &str { + self.error.as_ref().map(|e| e.description()) + .unwrap_or("unknown cli error") + } + + fn cause(&self) -> Option<&Error> { + self.error.as_ref().and_then(|e| e.cause()) + } +} + +impl fmt::Display for CliError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(ref error) = self.error { + error.fmt(f) + } else { + self.description().fmt(f) + } + } +} + +impl CliError { + pub fn new(error: CargoError, code: i32) -> CliError { + let human = &error.is_human(); + CliError { error: Some(error), exit_code: code, unknown: !human } + } + + pub fn code(code: i32) -> CliError { + CliError { error: None, exit_code: code, unknown: false } + } +} + +impl From for CliError { + fn from(err: CargoError) -> CliError { + CliError::new(err, 101) + } +} + + +// ============================================================================= +// Construction helpers + +pub fn process_error(msg: &str, + status: Option<&ExitStatus>, + output: Option<&Output>) -> ProcessError +{ + let exit = match status { + Some(s) => status_to_string(s), + None => "never executed".to_string(), + }; + let mut desc = format!("{} ({})", &msg, exit); + + if let Some(out) = output { + match str::from_utf8(&out.stdout) { + Ok(s) if !s.trim().is_empty() => { + desc.push_str("\n--- stdout\n"); + desc.push_str(s); + } + Ok(..) | Err(..) => {} + } + match str::from_utf8(&out.stderr) { + Ok(s) if !s.trim().is_empty() => { + desc.push_str("\n--- stderr\n"); + desc.push_str(s); + } + Ok(..) | Err(..) 
=> {} + } + } + + return ProcessError { + desc: desc, + exit: status.cloned(), + output: output.cloned(), + }; + + #[cfg(unix)] + fn status_to_string(status: &ExitStatus) -> String { + use std::os::unix::process::*; + use libc; + + if let Some(signal) = status.signal() { + let name = match signal as libc::c_int { + libc::SIGABRT => ", SIGABRT: process abort signal", + libc::SIGALRM => ", SIGALRM: alarm clock", + libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation", + libc::SIGHUP => ", SIGHUP: hangup", + libc::SIGILL => ", SIGILL: illegal instruction", + libc::SIGINT => ", SIGINT: terminal interrupt signal", + libc::SIGKILL => ", SIGKILL: kill", + libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read", + libc::SIGQUIT => ", SIGQUIT: terminal quite signal", + libc::SIGSEGV => ", SIGSEGV: invalid memory reference", + libc::SIGTERM => ", SIGTERM: termination signal", + libc::SIGBUS => ", SIGBUS: access to undefined memory", + #[cfg(not(target_os = "haiku"))] + libc::SIGSYS => ", SIGSYS: bad system call", + libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap", + _ => "", + }; + format!("signal: {}{}", signal, name) + } else { + status.to_string() + } + } + + #[cfg(windows)] + fn status_to_string(status: &ExitStatus) -> String { + status.to_string() + } +} + +pub fn internal(error: S) -> CargoError { + _internal(&error) +} + +fn _internal(error: &fmt::Display) -> CargoError { + CargoError::from_kind(error.to_string().into()).into_internal() +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/flock.rs b/collector/compile-benchmarks/cargo/src/cargo/util/flock.rs new file mode 100644 index 000000000..9f6ae48ea --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/flock.rs @@ -0,0 +1,344 @@ +use std::fs::{self, File, OpenOptions}; +use std::io::*; +use std::io; +use std::path::{Path, PathBuf, Display}; + +use termcolor::Color::Cyan; +use fs2::{FileExt, lock_contended_error}; +#[allow(unused_imports)] +use libc; + +use util::Config; +use util::errors::{CargoResult, CargoResultExt}; + +pub struct FileLock { + f: Option, + path: PathBuf, + state: State, +} + +#[derive(PartialEq)] +enum State { + Unlocked, + Shared, + Exclusive, +} + +impl FileLock { + /// Returns the underlying file handle of this lock. + pub fn file(&self) -> &File { + self.f.as_ref().unwrap() + } + + /// Returns the underlying path that this lock points to. + /// + /// Note that special care must be taken to ensure that the path is not + /// referenced outside the lifetime of this lock. + pub fn path(&self) -> &Path { + assert!(self.state != State::Unlocked); + &self.path + } + + /// Returns the parent path containing this file + pub fn parent(&self) -> &Path { + assert!(self.state != State::Unlocked); + self.path.parent().unwrap() + } + + /// Removes all sibling files to this locked file. + /// + /// This can be useful if a directory is locked with a sentinel file but it + /// needs to be cleared out as it may be corrupt. + pub fn remove_siblings(&self) -> io::Result<()> { + let path = self.path(); + for entry in path.parent().unwrap().read_dir()? 
{ + let entry = entry?; + if Some(&entry.file_name()[..]) == path.file_name() { + continue + } + let kind = entry.file_type()?; + if kind.is_dir() { + fs::remove_dir_all(entry.path())?; + } else { + fs::remove_file(entry.path())?; + } + } + Ok(()) + } +} + +impl Read for FileLock { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.file().read(buf) + } +} + +impl Seek for FileLock { + fn seek(&mut self, to: SeekFrom) -> io::Result { + self.file().seek(to) + } +} + +impl Write for FileLock { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.file().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.file().flush() + } +} + +impl Drop for FileLock { + fn drop(&mut self) { + if self.state != State::Unlocked { + if let Some(f) = self.f.take() { + let _ = f.unlock(); + } + } + } +} + +/// A "filesystem" is intended to be a globally shared, hence locked, resource +/// in Cargo. +/// +/// The `Path` of a filesystem cannot be learned unless it's done in a locked +/// fashion, and otherwise functions on this structure are prepared to handle +/// concurrent invocations across multiple instances of Cargo. +#[derive(Clone, Debug)] +pub struct Filesystem { + root: PathBuf, +} + +impl Filesystem { + /// Creates a new filesystem to be rooted at the given path. + pub fn new(path: PathBuf) -> Filesystem { + Filesystem { root: path } + } + + /// Like `Path::join`, creates a new filesystem rooted at this filesystem + /// joined with the given path. + pub fn join>(&self, other: T) -> Filesystem { + Filesystem::new(self.root.join(other)) + } + + /// Like `Path::push`, pushes a new path component onto this filesystem. + pub fn push>(&mut self, other: T) { + self.root.push(other); + } + + /// Consumes this filesystem and returns the underlying `PathBuf`. + /// + /// Note that this is a relatively dangerous operation and should be used + /// with great caution!. + pub fn into_path_unlocked(self) -> PathBuf { + self.root + } + + /// Creates the directory pointed to by this filesystem. + /// + /// Handles errors where other Cargo processes are also attempting to + /// concurrently create this directory. + pub fn create_dir(&self) -> io::Result<()> { + create_dir_all(&self.root) + } + + /// Returns an adaptor that can be used to print the path of this + /// filesystem. + pub fn display(&self) -> Display { + self.root.display() + } + + /// Opens exclusive access to a file, returning the locked version of a + /// file. + /// + /// This function will create a file at `path` if it doesn't already exist + /// (including intermediate directories), and then it will acquire an + /// exclusive lock on `path`. If the process must block waiting for the + /// lock, the `msg` is printed to `config`. + /// + /// The returned file can be accessed to look at the path and also has + /// read/write access to the underlying file. + pub fn open_rw
<P>
(&self, + path: P, + config: &Config, + msg: &str) -> CargoResult + where P: AsRef + { + self.open(path.as_ref(), + OpenOptions::new().read(true).write(true).create(true), + State::Exclusive, + config, + msg) + } + + /// Opens shared access to a file, returning the locked version of a file. + /// + /// This function will fail if `path` doesn't already exist, but if it does + /// then it will acquire a shared lock on `path`. If the process must block + /// waiting for the lock, the `msg` is printed to `config`. + /// + /// The returned file can be accessed to look at the path and also has read + /// access to the underlying file. Any writes to the file will return an + /// error. + pub fn open_ro
<P>
(&self, + path: P, + config: &Config, + msg: &str) -> CargoResult + where P: AsRef + { + self.open(path.as_ref(), + OpenOptions::new().read(true), + State::Shared, + config, + msg) + } + + fn open(&self, + path: &Path, + opts: &OpenOptions, + state: State, + config: &Config, + msg: &str) -> CargoResult { + let path = self.root.join(path); + + // If we want an exclusive lock then if we fail because of NotFound it's + // likely because an intermediate directory didn't exist, so try to + // create the directory and then continue. + let f = opts.open(&path).or_else(|e| { + if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive { + create_dir_all(path.parent().unwrap())?; + opts.open(&path) + } else { + Err(e) + } + }).chain_err(|| { + format!("failed to open: {}", path.display()) + })?; + match state { + State::Exclusive => { + acquire(config, msg, &path, + &|| f.try_lock_exclusive(), + &|| f.lock_exclusive())?; + } + State::Shared => { + acquire(config, msg, &path, + &|| f.try_lock_shared(), + &|| f.lock_shared())?; + } + State::Unlocked => {} + + } + Ok(FileLock { f: Some(f), path: path, state: state }) + } +} + +/// Acquires a lock on a file in a "nice" manner. +/// +/// Almost all long-running blocking actions in Cargo have a status message +/// associated with them as we're not sure how long they'll take. Whenever a +/// conflicted file lock happens, this is the case (we're not sure when the lock +/// will be released). +/// +/// This function will acquire the lock on a `path`, printing out a nice message +/// to the console if we have to wait for it. It will first attempt to use `try` +/// to acquire a lock on the crate, and in the case of contention it will emit a +/// status message based on `msg` to `config`'s shell, and then use `block` to +/// block waiting to acquire a lock. +/// +/// Returns an error if the lock could not be acquired or if any error other +/// than a contention error happens. +fn acquire(config: &Config, + msg: &str, + path: &Path, + try: &Fn() -> io::Result<()>, + block: &Fn() -> io::Result<()>) -> CargoResult<()> { + + // File locking on Unix is currently implemented via `flock`, which is known + // to be broken on NFS. We could in theory just ignore errors that happen on + // NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking + // forever**, even if the nonblocking flag is passed! + // + // As a result, we just skip all file locks entirely on NFS mounts. That + // should avoid calling any `flock` functions at all, and it wouldn't work + // there anyway. + // + // [1]: https://github.com/rust-lang/cargo/issues/2615 + if is_on_nfs_mount(path) { + return Ok(()) + } + + match try() { + Ok(()) => return Ok(()), + + // In addition to ignoring NFS which is commonly not working we also + // just ignore locking on filesystems that look like they don't + // implement file locking. We detect that here via the return value of + // locking (e.g. inspecting errno). 
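// (Editor's note, not in the vendored source:) ENOTSUP is what some
// filesystems without lock support report for flock(2), and ENOSYS can
// surface on Linux configurations lacking the syscall entirely; both
// are treated like the NFS case above, and locking is skipped.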
+ #[cfg(unix)] + Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) => return Ok(()), + + #[cfg(target_os = "linux")] + Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => return Ok(()), + + Err(e) => { + if e.raw_os_error() != lock_contended_error().raw_os_error() { + return Err(e).chain_err(|| { + format!("failed to lock file: {}", path.display()) + }) + } + } + } + let msg = format!("waiting for file lock on {}", msg); + config.shell().status_with_color("Blocking", &msg, Cyan)?; + + return block().chain_err(|| { + format!("failed to lock file: {}", path.display()) + }); + + #[cfg(all(target_os = "linux", not(target_env = "musl")))] + fn is_on_nfs_mount(path: &Path) -> bool { + use std::ffi::CString; + use std::mem; + use std::os::unix::prelude::*; + + let path = match CString::new(path.as_os_str().as_bytes()) { + Ok(path) => path, + Err(_) => return false, + }; + + unsafe { + let mut buf: libc::statfs = mem::zeroed(); + let r = libc::statfs(path.as_ptr(), &mut buf); + + r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32 + } + } + + #[cfg(any(not(target_os = "linux"), target_env = "musl"))] + fn is_on_nfs_mount(_path: &Path) -> bool { + false + } +} + +fn create_dir_all(path: &Path) -> io::Result<()> { + match create_dir(path) { + Ok(()) => Ok(()), + Err(e) => { + if e.kind() == io::ErrorKind::NotFound { + if let Some(p) = path.parent() { + return create_dir_all(p).and_then(|()| create_dir(path)) + } + } + Err(e) + } + } +} + +fn create_dir(path: &Path) -> io::Result<()> { + match fs::create_dir(path) { + Ok(()) => Ok(()), + Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()), + Err(e) => Err(e), + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/graph.rs b/collector/compile-benchmarks/cargo/src/cargo/util/graph.rs new file mode 100644 index 000000000..d97b9d44d --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/graph.rs @@ -0,0 +1,105 @@ +use std::fmt; +use std::hash::Hash; +use std::collections::hash_set::{HashSet, Iter}; +use std::collections::hash_map::{HashMap, Keys}; + +pub struct Graph { + nodes: HashMap> +} + +enum Mark { + InProgress, + Done +} + +pub type Nodes<'a, N> = Keys<'a, N, HashSet>; +pub type Edges<'a, N> = Iter<'a, N>; + +impl Graph { + pub fn new() -> Graph { + Graph { nodes: HashMap::new() } + } + + pub fn add(&mut self, node: N, children: &[N]) { + self.nodes.entry(node) + .or_insert_with(HashSet::new) + .extend(children.iter().cloned()); + } + + pub fn link(&mut self, node: N, child: N) { + self.nodes.entry(node).or_insert_with(HashSet::new).insert(child); + } + + pub fn get_nodes(&self) -> &HashMap> { + &self.nodes + } + + pub fn edges(&self, node: &N) -> Option> { + self.nodes.get(node).map(|set| set.iter()) + } + + pub fn sort(&self) -> Option> { + let mut ret = Vec::new(); + let mut marks = HashMap::new(); + + for node in self.nodes.keys() { + self.visit(node, &mut ret, &mut marks); + } + + Some(ret) + } + + fn visit(&self, node: &N, dst: &mut Vec, marks: &mut HashMap) { + if marks.contains_key(node) { + return; + } + + marks.insert(node.clone(), Mark::InProgress); + + for child in &self.nodes[node] { + self.visit(child, dst, marks); + } + + dst.push(node.clone()); + marks.insert(node.clone(), Mark::Done); + } + + pub fn iter(&self) -> Nodes { + self.nodes.keys() + } +} + +impl Default for Graph { + fn default() -> Graph { + Graph::new() + } +} + +impl fmt::Debug for Graph { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + writeln!(fmt, "Graph {{")?; + + for (n, e) in &self.nodes { 
+ writeln!(fmt, " - {}", n)?; + + for n in e.iter() { + writeln!(fmt, " - {}", n)?; + } + } + + write!(fmt, "}}")?; + + Ok(()) + } +} + +impl PartialEq for Graph { + fn eq(&self, other: &Graph) -> bool { self.nodes.eq(&other.nodes) } +} +impl Eq for Graph {} + +impl Clone for Graph { + fn clone(&self) -> Graph { + Graph { nodes: self.nodes.clone() } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/hex.rs b/collector/compile-benchmarks/cargo/src/cargo/util/hex.rs new file mode 100644 index 000000000..71a4c112f --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/hex.rs @@ -0,0 +1,27 @@ +#![allow(deprecated)] + +use hex::ToHex; +use std::hash::{Hasher, Hash, SipHasher}; + +pub fn to_hex(num: u64) -> String { + [ + (num >> 0) as u8, + (num >> 8) as u8, + (num >> 16) as u8, + (num >> 24) as u8, + (num >> 32) as u8, + (num >> 40) as u8, + (num >> 48) as u8, + (num >> 56) as u8, + ].to_hex() +} + +pub fn hash_u64(hashable: &H) -> u64 { + let mut hasher = SipHasher::new_with_keys(0, 0); + hashable.hash(&mut hasher); + hasher.finish() +} + +pub fn short_hash(hashable: &H) -> String { + to_hex(hash_u64(hashable)) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/important_paths.rs b/collector/compile-benchmarks/cargo/src/cargo/util/important_paths.rs new file mode 100644 index 000000000..069979ea9 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/important_paths.rs @@ -0,0 +1,65 @@ +use std::fs; +use std::path::{Path, PathBuf}; +use util::errors::CargoResult; +use util::paths; + +/// Iteratively search for `file` in `pwd` and its parents, returning +/// the path of the directory. +pub fn find_project(pwd: &Path, file: &str) -> CargoResult { + find_project_manifest(pwd, file).map(|mut p| { + // remove the file, leaving just the directory + p.pop(); + p + }) +} + +/// Iteratively search for `file` in `pwd` and its parents, returning +/// the path to the file. +pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult { + let mut current = pwd; + + loop { + let manifest = current.join(file); + if fs::metadata(&manifest).is_ok() { + return Ok(manifest) + } + + match current.parent() { + Some(p) => current = p, + None => break, + } + } + + bail!("could not find `{}` in `{}` or any parent directory", + file, pwd.display()) +} + +/// Find the root Cargo.toml +pub fn find_root_manifest_for_wd(manifest_path: Option, cwd: &Path) + -> CargoResult { + match manifest_path { + Some(path) => { + let absolute_path = paths::normalize_path(&cwd.join(&path)); + if !absolute_path.ends_with("Cargo.toml") { + bail!("the manifest-path must be a path to a Cargo.toml file") + } + if !fs::metadata(&absolute_path).is_ok() { + bail!("manifest path `{}` does not exist", path) + } + Ok(absolute_path) + }, + None => find_project_manifest(cwd, "Cargo.toml"), + } +} + +/// Return the path to the `file` in `pwd`, if it exists. +pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult { + let manifest = pwd.join(file); + + if fs::metadata(&manifest).is_ok() { + Ok(manifest) + } else { + Err(format!("Could not find `{}` in `{}`", + file, pwd.display()).into()) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/job.rs b/collector/compile-benchmarks/cargo/src/cargo/util/job.rs new file mode 100644 index 000000000..06f51356d --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/job.rs @@ -0,0 +1,260 @@ +//! Job management (mostly for windows) +//! +//! 
Most of the time when you're running cargo you expect Ctrl-C to actually +//! terminate the entire tree of processes in play, not just the one at the top +//! (cago). This currently works "by default" on Unix platforms because Ctrl-C +//! actually sends a signal to the *process group* rather than the parent +//! process, so everything will get torn down. On Windows, however, this does +//! not happen and Ctrl-C just kills cargo. +//! +//! To achieve the same semantics on Windows we use Job Objects to ensure that +//! all processes die at the same time. Job objects have a mode of operation +//! where when all handles to the object are closed it causes all child +//! processes associated with the object to be terminated immediately. +//! Conveniently whenever a process in the job object spawns a new process the +//! child will be associated with the job object as well. This means if we add +//! ourselves to the job object we create then everything will get torn down! + +pub use self::imp::Setup; + +pub fn setup() -> Option { + unsafe { imp::setup() } +} + +#[cfg(unix)] +mod imp { + use std::env; + use libc; + + pub type Setup = (); + + pub unsafe fn setup() -> Option<()> { + // There's a test case for the behavior of + // when-cargo-is-killed-subprocesses-are-also-killed, but that requires + // one cargo spawned to become its own session leader, so we do that + // here. + if env::var("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE").is_ok() { + libc::setsid(); + } + Some(()) + } +} + +#[cfg(windows)] +mod imp { + extern crate kernel32; + extern crate winapi; + extern crate psapi; + + use std::ffi::OsString; + use std::io; + use std::mem; + use std::os::windows::prelude::*; + + pub struct Setup { + job: Handle, + } + + pub struct Handle { + inner: winapi::HANDLE, + } + + fn last_err() -> io::Error { + io::Error::last_os_error() + } + + pub unsafe fn setup() -> Option { + // Creates a new job object for us to use and then adds ourselves to it. + // Note that all errors are basically ignored in this function, + // intentionally. Job objects are "relatively new" in Windows, + // particularly the ability to support nested job objects. Older + // Windows installs don't support this ability. We probably don't want + // to force Cargo to abort in this situation or force others to *not* + // use job objects, so we instead just ignore errors and assume that + // we're otherwise part of someone else's job object in this case. + + let job = kernel32::CreateJobObjectW(0 as *mut _, 0 as *const _); + if job.is_null() { + return None + } + let job = Handle { inner: job }; + + // Indicate that when all handles to the job object are gone that all + // process in the object should be killed. Note that this includes our + // entire process tree by default because we've added ourselves and and + // our children will reside in the job once we spawn a process. + let mut info: winapi::JOBOBJECT_EXTENDED_LIMIT_INFORMATION; + info = mem::zeroed(); + info.BasicLimitInformation.LimitFlags = + winapi::JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; + let r = kernel32::SetInformationJobObject(job.inner, + winapi::JobObjectExtendedLimitInformation, + &mut info as *mut _ as winapi::LPVOID, + mem::size_of_val(&info) as winapi::DWORD); + if r == 0 { + return None + } + + // Assign our process to this job object, meaning that our children will + // now live or die based on our existence. 
+ let me = kernel32::GetCurrentProcess(); + let r = kernel32::AssignProcessToJobObject(job.inner, me); + if r == 0 { + return None + } + + Some(Setup { job: job }) + } + + impl Drop for Setup { + fn drop(&mut self) { + // This is a litte subtle. By default if we are terminated then all + // processes in our job object are terminated as well, but we + // intentionally want to whitelist some processes to outlive our job + // object (see below). + // + // To allow for this, we manually kill processes instead of letting + // the job object kill them for us. We do this in a loop to handle + // processes spawning other processes. + // + // Finally once this is all done we know that the only remaining + // ones are ourselves and the whitelisted processes. The destructor + // here then configures our job object to *not* kill everything on + // close, then closes the job object. + unsafe { + while self.kill_remaining() { + info!("killed some, going for more"); + } + + let mut info: winapi::JOBOBJECT_EXTENDED_LIMIT_INFORMATION; + info = mem::zeroed(); + let r = kernel32::SetInformationJobObject( + self.job.inner, + winapi::JobObjectExtendedLimitInformation, + &mut info as *mut _ as winapi::LPVOID, + mem::size_of_val(&info) as winapi::DWORD); + if r == 0 { + info!("failed to configure job object to defaults: {}", + last_err()); + } + } + } + } + + impl Setup { + unsafe fn kill_remaining(&mut self) -> bool { + #[repr(C)] + struct Jobs { + header: winapi::JOBOBJECT_BASIC_PROCESS_ID_LIST, + list: [winapi::ULONG_PTR; 1024], + } + + let mut jobs: Jobs = mem::zeroed(); + let r = kernel32::QueryInformationJobObject( + self.job.inner, + winapi::JobObjectBasicProcessIdList, + &mut jobs as *mut _ as winapi::LPVOID, + mem::size_of_val(&jobs) as winapi::DWORD, + 0 as *mut _); + if r == 0 { + info!("failed to query job object: {}", last_err()); + return false + } + + let mut killed = false; + let list = &jobs.list[..jobs.header.NumberOfProcessIdsInList as usize]; + assert!(list.len() > 0); + info!("found {} remaining processes", list.len() - 1); + + let list = list.iter().filter(|&&id| { + // let's not kill ourselves + id as winapi::DWORD != kernel32::GetCurrentProcessId() + }).filter_map(|&id| { + // Open the process with the necessary rights, and if this + // fails then we probably raced with the process exiting so we + // ignore the problem. + let flags = winapi::PROCESS_QUERY_INFORMATION | + winapi::PROCESS_TERMINATE | + winapi::SYNCHRONIZE; + let p = kernel32::OpenProcess(flags, + winapi::FALSE, + id as winapi::DWORD); + if p.is_null() { + None + } else { + Some(Handle { inner: p }) + } + }).filter(|p| { + // Test if this process was actually in the job object or not. + // If it's not then we likely raced with something else + // recycling this PID, so we just skip this step. + let mut res = 0; + let r = kernel32::IsProcessInJob(p.inner, self.job.inner, &mut res); + if r == 0 { + info!("failed to test is process in job: {}", last_err()); + return false + } + res == winapi::TRUE + }); + + + for p in list { + // Load the file which this process was spawned from. We then + // later use this for identification purposes. + let mut buf = [0; 1024]; + let r = psapi::GetProcessImageFileNameW(p.inner, + buf.as_mut_ptr(), + buf.len() as winapi::DWORD); + if r == 0 { + info!("failed to get image name: {}", last_err()); + continue + } + let s = OsString::from_wide(&buf[..r as usize]); + info!("found remaining: {:?}", s); + + // And here's where we find the whole purpose for this + // function! 
Currently, our only whitelisted process is + // `mspdbsrv.exe`, and more details about that can be found + // here: + // + // https://github.com/rust-lang/rust/issues/33145 + // + // The gist of it is that all builds on one machine use the + // same `mspdbsrv.exe` instance. If we were to kill this + // instance then we could erroneously cause other builds to + // fail. + if let Some(s) = s.to_str() { + if s.contains("mspdbsrv") { + info!("\toops, this is mspdbsrv"); + continue + } + } + + // Ok, this isn't mspdbsrv, let's kill the process. After we + // kill it we wait on it to ensure that the next time around in + // this function we're not going to see it again. + let r = kernel32::TerminateProcess(p.inner, 1); + if r == 0 { + info!("\tfailed to kill subprocess: {}", last_err()); + info!("\tassuming subprocess is dead..."); + } else { + info!("\tterminated subprocess"); + } + let r = kernel32::WaitForSingleObject(p.inner, winapi::INFINITE); + if r != 0 { + info!("failed to wait for process to die: {}", last_err()); + return false + } + killed = true; + } + + return killed + } + } + + impl Drop for Handle { + fn drop(&mut self) { + unsafe { kernel32::CloseHandle(self.inner); } + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/lazy_cell.rs b/collector/compile-benchmarks/cargo/src/cargo/util/lazy_cell.rs new file mode 100644 index 000000000..607f2ef98 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/lazy_cell.rs @@ -0,0 +1,73 @@ +//! A lazily fill Cell, but with frozen contents. +//! +//! With a `RefCell`, the inner contents cannot be borrowed for the lifetime of +//! the entire object, but only of the borrows returned. A `LazyCell` is a +//! variation on `RefCell` which allows borrows tied to the lifetime of the +//! outer object. +//! +//! The limitation of a `LazyCell` is that after initialized, it can never be +//! modified unless you've otherwise got a `&mut` reference + +use std::cell::UnsafeCell; + +#[derive(Debug)] +pub struct LazyCell { + inner: UnsafeCell>, +} + +impl LazyCell { + /// Creates a new empty lazy cell. + pub fn new() -> LazyCell { + LazyCell { inner: UnsafeCell::new(None) } + } + + /// Put a value into this cell. + /// + /// This function will fail if the cell has already been filled. + pub fn fill(&self, t: T) -> Result<(), T> { + unsafe { + let slot = self.inner.get(); + if (*slot).is_none() { + *slot = Some(t); + Ok(()) + } else { + Err(t) + } + } + } + + /// Borrows the contents of this lazy cell for the duration of the cell + /// itself. + /// + /// This function will return `Some` if the cell has been previously + /// initialized, and `None` if it has not yet been initialized. + pub fn borrow(&self) -> Option<&T> { + unsafe { + (*self.inner.get()).as_ref() + } + } + + /// Same as `borrow`, but the mutable version + pub fn borrow_mut(&mut self) -> Option<&mut T> { + unsafe { + (*self.inner.get()).as_mut() + } + } + + /// Consumes this `LazyCell`, returning the underlying value. + pub fn into_inner(self) -> Option { + unsafe { + self.inner.into_inner() + } + } + + /// Borrows the contents of this lazy cell, initializing it if necessary. 
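///
/// A minimal sketch (not in the vendored source) of the pattern `Config`
/// uses for fields like `rustc` and `values`:
///
/// ```ignore
/// let cell: LazyCell<String> = LazyCell::new();
/// // The closure runs at most once; later calls return the cached value.
/// let v = cell.get_or_try_init(|| Ok::<_, ()>("expensive".to_string())).unwrap();
/// assert_eq!(v, "expensive");
/// assert!(cell.fill("again".to_string()).is_err()); // already filled
/// ```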
+ pub fn get_or_try_init(&self, init: F) -> Result<&T, Error> + where F: FnOnce() -> Result + { + if self.borrow().is_none() && self.fill(init()?).is_err() { + unreachable!(); + } + Ok(self.borrow().unwrap()) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/lev_distance.rs b/collector/compile-benchmarks/cargo/src/cargo/util/lev_distance.rs new file mode 100644 index 000000000..d55a3443a --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/lev_distance.rs @@ -0,0 +1,53 @@ +use std::cmp; + +pub fn lev_distance(me: &str, t: &str) -> usize { + if me.is_empty() { return t.chars().count(); } + if t.is_empty() { return me.chars().count(); } + + let mut dcol = (0..t.len() + 1).collect::>(); + let mut t_last = 0; + + for (i, sc) in me.chars().enumerate() { + + let mut current = i; + dcol[0] = current + 1; + + for (j, tc) in t.chars().enumerate() { + + let next = dcol[j + 1]; + + if sc == tc { + dcol[j + 1] = current; + } else { + dcol[j + 1] = cmp::min(current, next); + dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1; + } + + current = next; + t_last = j; + } + } + + dcol[t_last + 1] +} + +#[test] +fn test_lev_distance() { + use std::char::{ from_u32, MAX }; + // Test bytelength agnosticity + for c in (0u32..MAX as u32) + .filter_map(|i| from_u32(i)) + .map(|i| i.to_string()) { + assert_eq!(lev_distance(&c, &c), 0); + } + + let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; + let b = "\nMary häd ä little lämb\n\nLittle lämb\n"; + let c = "Mary häd ä little lämb\n\nLittle lämb\n"; + assert_eq!(lev_distance(a, b), 1); + assert_eq!(lev_distance(b, a), 1); + assert_eq!(lev_distance(a, c), 2); + assert_eq!(lev_distance(c, a), 2); + assert_eq!(lev_distance(b, c), 1); + assert_eq!(lev_distance(c, b), 1); +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/machine_message.rs b/collector/compile-benchmarks/cargo/src/cargo/util/machine_message.rs new file mode 100644 index 000000000..ddfeed7de --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/machine_message.rs @@ -0,0 +1,58 @@ +use serde::ser; +use serde_json::{self, Value}; + +use core::{PackageId, Target, Profile}; + +pub trait Message: ser::Serialize { + fn reason(&self) -> &str; +} + +pub fn emit(t: &T) { + let mut json: Value = serde_json::to_value(t).unwrap(); + json["reason"] = json!(t.reason()); + println!("{}", json); +} + +#[derive(Serialize)] +pub struct FromCompiler<'a> { + pub package_id: &'a PackageId, + pub target: &'a Target, + pub message: serde_json::Value, +} + +impl<'a> Message for FromCompiler<'a> { + fn reason(&self) -> &str { + "compiler-message" + } +} + +#[derive(Serialize)] +pub struct Artifact<'a> { + pub package_id: &'a PackageId, + pub target: &'a Target, + pub profile: &'a Profile, + pub features: Vec, + pub filenames: Vec, + pub fresh: bool, +} + +impl<'a> Message for Artifact<'a> { + fn reason(&self) -> &str { + "compiler-artifact" + } +} + +#[derive(Serialize)] +pub struct BuildScript<'a> { + pub package_id: &'a PackageId, + pub linked_libs: &'a [String], + pub linked_paths: &'a [String], + pub cfgs: &'a [String], + pub env: &'a [(String, String)], +} + +impl<'a> Message for BuildScript<'a> { + fn reason(&self) -> &str { + "build-script-executed" + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/util/mod.rs new file mode 100644 index 000000000..9c1c9c5e0 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/mod.rs @@ -0,0 +1,44 @@ +pub use self::cfg::{Cfg, 
CfgExpr}; +pub use self::config::{Config, ConfigValue, homedir}; +pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness}; +pub use self::errors::{CargoResult, CargoResultExt, CargoError, CargoErrorKind, Test, CliResult}; +pub use self::errors::{CliError, ProcessError, CargoTestError}; +pub use self::errors::{process_error, internal}; +pub use self::flock::{FileLock, Filesystem}; +pub use self::graph::Graph; +pub use self::hex::{to_hex, short_hash, hash_u64}; +pub use self::lazy_cell::LazyCell; +pub use self::lev_distance::{lev_distance}; +pub use self::paths::{join_paths, path2bytes, bytes2path, dylib_path}; +pub use self::paths::{normalize_path, dylib_path_envvar, without_prefix}; +pub use self::process_builder::{process, ProcessBuilder}; +pub use self::rustc::Rustc; +pub use self::sha256::Sha256; +pub use self::to_semver::ToSemver; +pub use self::to_url::ToUrl; +pub use self::vcs::{GitRepo, HgRepo, PijulRepo, FossilRepo}; +pub use self::read2::read2; + +pub mod config; +pub mod errors; +pub mod graph; +pub mod hex; +pub mod important_paths; +pub mod job; +pub mod lev_distance; +pub mod machine_message; +pub mod network; +pub mod paths; +pub mod process_builder; +pub mod profile; +pub mod to_semver; +pub mod to_url; +pub mod toml; +mod cfg; +mod dependency_queue; +mod rustc; +mod sha256; +mod vcs; +mod lazy_cell; +mod flock; +mod read2; diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/network.rs b/collector/compile-benchmarks/cargo/src/cargo/util/network.rs new file mode 100644 index 000000000..4c7c4dcb5 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/network.rs @@ -0,0 +1,106 @@ +use std; +use std::error::Error; + +use error_chain::ChainedError; +use util::Config; +use util::errors::{CargoError, CargoErrorKind, CargoResult}; +use git2; + +fn maybe_spurious(err: &E) -> bool + where E: ChainedError + 'static { + //Error inspection in non-verbose mode requires inspecting the + //error kind to avoid printing Internal errors. The downcasting + //machinery requires &(Error + 'static), but the iterator (and + //underlying `cause`) return &Error. Because the borrows are + //constrained to this handling method, and because the original + //error object is constrained to be 'static, we're casting away + //the borrow's actual lifetime for purposes of downcasting and + //inspecting the error chain + unsafe fn extend_lifetime(r: &Error) -> &(Error + 'static) { + std::mem::transmute::<&Error, &Error>(r) + } + + for e in err.iter() { + let e = unsafe { extend_lifetime(e) }; + if let Some(cargo_err) = e.downcast_ref::() { + match cargo_err.kind() { + &CargoErrorKind::Git(ref git_err) => { + match git_err.class() { + git2::ErrorClass::Net | + git2::ErrorClass::Os => return true, + _ => () + } + } + &CargoErrorKind::Curl(ref curl_err) + if curl_err.is_couldnt_connect() || + curl_err.is_couldnt_resolve_proxy() || + curl_err.is_couldnt_resolve_host() || + curl_err.is_operation_timedout() || + curl_err.is_recv_error() => { + return true + } + &CargoErrorKind::HttpNot200(code, ref _url) if 500 <= code && code < 600 => { + return true + } + _ => () + } + } + } + false +} + +/// Wrapper method for network call retry logic. +/// +/// Retry counts provided by Config object `net.retry`. Config shell outputs +/// a warning on per retry. +/// +/// Closure must return a `CargoResult`. 
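+///
+/// Only errors that `maybe_spurious` classifies as transient (git errors of
+/// class `Net` or `Os`, curl connect/resolve/timeout/receive errors, and
+/// HTTP 5xx responses) trigger a retry; any other error is returned
+/// immediately.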
+/// +/// # Examples +/// +/// ```ignore +/// use util::network; +/// cargo_result = network.with_retry(&config, || something.download()); +/// ``` +pub fn with_retry(config: &Config, mut callback: F) -> CargoResult + where F: FnMut() -> CargoResult +{ + let mut remaining = config.net_retry()?; + loop { + match callback() { + Ok(ret) => return Ok(ret), + Err(ref e) if maybe_spurious(e) && remaining > 0 => { + let msg = format!("spurious network error ({} tries \ + remaining): {}", remaining, e); + config.shell().warn(msg)?; + remaining -= 1; + } + //todo impl from + Err(e) => return Err(e.into()), + } + } +} +#[test] +fn with_retry_repeats_the_call_then_works() { + //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry + let error1 = CargoErrorKind::HttpNot200(501, "Uri".to_string()).into(); + let error2 = CargoErrorKind::HttpNot200(502, "Uri".to_string()).into(); + let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; + let config = Config::default().unwrap(); + let result = with_retry(&config, || results.pop().unwrap()); + assert_eq!(result.unwrap(), ()) +} + +#[test] +fn with_retry_finds_nested_spurious_errors() { + //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry + //String error messages are not considered spurious + let error1 : CargoError = CargoErrorKind::HttpNot200(501, "Uri".to_string()).into(); + let error1 = CargoError::with_chain(error1, "A non-spurious wrapping err"); + let error2 = CargoError::from_kind(CargoErrorKind::HttpNot200(502, "Uri".to_string())); + let error2 = CargoError::with_chain(error2, "A second chained error"); + let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; + let config = Config::default().unwrap(); + let result = with_retry(&config, || results.pop().unwrap()); + assert_eq!(result.unwrap(), ()) +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/paths.rs b/collector/compile-benchmarks/cargo/src/cargo/util/paths.rs new file mode 100644 index 000000000..ea6a66958 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/paths.rs @@ -0,0 +1,183 @@ +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fs::File; +use std::fs::OpenOptions; +use std::io::prelude::*; +use std::path::{Path, PathBuf, Component}; + +use util::{internal, CargoResult}; +use util::errors::CargoResultExt; + +pub fn join_paths>(paths: &[T], env: &str) -> CargoResult { + env::join_paths(paths.iter()).or_else(|e| { + let paths = paths.iter().map(Path::new).collect::>(); + Err(internal(format!("failed to join path array: {:?}", paths))).chain_err(|| { + format!("failed to join search paths together: {}\n\ + Does ${} have an unterminated quote character?", + e, env) + }) + }) +} + +pub fn dylib_path_envvar() -> &'static str { + if cfg!(windows) {"PATH"} + else if cfg!(target_os = "macos") {"DYLD_LIBRARY_PATH"} + else {"LD_LIBRARY_PATH"} +} + +pub fn dylib_path() -> Vec { + match env::var_os(dylib_path_envvar()) { + Some(var) => env::split_paths(&var).collect(), + None => Vec::new(), + } +} + +pub fn normalize_path(path: &Path) -> PathBuf { + let mut components = path.components().peekable(); + let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek() + .cloned() { + components.next(); + PathBuf::from(c.as_os_str()) + } else { + PathBuf::new() + }; + + for component in components { + match component { + Component::Prefix(..) 
=> unreachable!(), + Component::RootDir => { ret.push(component.as_os_str()); } + Component::CurDir => {} + Component::ParentDir => { ret.pop(); } + Component::Normal(c) => { ret.push(c); } + } + } + ret +} + +pub fn without_prefix<'a>(long_path: &'a Path, prefix: &'a Path) -> Option<&'a Path> { + let mut a = long_path.components(); + let mut b = prefix.components(); + loop { + match b.next() { + Some(y) => match a.next() { + Some(x) if x == y => continue, + _ => return None, + }, + None => return Some(a.as_path()), + } + } +} + +pub fn read(path: &Path) -> CargoResult { + match String::from_utf8(read_bytes(path)?) { + Ok(s) => Ok(s), + Err(_) => bail!("path at `{}` was not valid utf-8", path.display()), + } +} + +pub fn read_bytes(path: &Path) -> CargoResult> { + (|| -> CargoResult<_> { + let mut ret = Vec::new(); + let mut f = File::open(path)?; + if let Ok(m) = f.metadata() { + ret.reserve(m.len() as usize + 1); + } + f.read_to_end(&mut ret)?; + Ok(ret) + })().chain_err(|| { + format!("failed to read `{}`", path.display()) + }) +} + +pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> { + (|| -> CargoResult<()> { + let mut f = File::create(path)?; + f.write_all(contents)?; + Ok(()) + })().chain_err(|| { + format!("failed to write `{}`", path.display()) + }) +} + +pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> { + (|| -> CargoResult<()> { + let mut f = OpenOptions::new() + .write(true) + .append(true) + .create(true) + .open(path)?; + + f.write_all(contents)?; + Ok(()) + })().chain_err(|| { + internal(format!("failed to write `{}`", path.display())) + }) +} + +#[cfg(unix)] +pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { + use std::os::unix::prelude::*; + Ok(path.as_os_str().as_bytes()) +} +#[cfg(windows)] +pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { + match path.as_os_str().to_str() { + Some(s) => Ok(s.as_bytes()), + None => Err(format!("invalid non-unicode path: {}", + path.display()).into()) + } +} + +#[cfg(unix)] +pub fn bytes2path(bytes: &[u8]) -> CargoResult { + use std::os::unix::prelude::*; + use std::ffi::OsStr; + Ok(PathBuf::from(OsStr::from_bytes(bytes))) +} +#[cfg(windows)] +pub fn bytes2path(bytes: &[u8]) -> CargoResult { + use std::str; + match str::from_utf8(bytes) { + Ok(s) => Ok(PathBuf::from(s)), + Err(..) => Err("invalid non-unicode path".into()), + } +} + +pub fn ancestors(path: &Path) -> PathAncestors { + PathAncestors::new(path) +} + +pub struct PathAncestors<'a> { + current: Option<&'a Path>, + stop_at: Option +} + +impl<'a> PathAncestors<'a> { + fn new(path: &Path) -> PathAncestors { + PathAncestors { + current: Some(path), + //HACK: avoid reading `~/.cargo/config` when testing Cargo itself. 
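+            // When `__CARGO_TEST_ROOT` is set, the iterator below yields
+            // that directory and then stops, so ancestors outside the test
+            // root are never visited.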
+ stop_at: env::var("__CARGO_TEST_ROOT").ok().map(PathBuf::from), + } + } +} + +impl<'a> Iterator for PathAncestors<'a> { + type Item = &'a Path; + + fn next(&mut self) -> Option<&'a Path> { + if let Some(path) = self.current { + self.current = path.parent(); + + if let Some(ref stop_at) = self.stop_at { + if path == stop_at { + self.current = None; + } + } + + Some(path) + } else { + None + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/process_builder.rs b/collector/compile-benchmarks/cargo/src/cargo/util/process_builder.rs new file mode 100644 index 000000000..ab5de7f08 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/process_builder.rs @@ -0,0 +1,312 @@ +use std::collections::HashMap; +use std::env; +use std::ffi::{OsString, OsStr}; +use std::fmt; +use std::path::Path; +use std::process::{Command, Stdio, Output}; + +use jobserver::Client; +use shell_escape::escape; + +use util::{CargoResult, CargoResultExt, CargoError, process_error, read2}; +use util::errors::CargoErrorKind; + +/// A builder object for an external process, similar to `std::process::Command`. +#[derive(Clone, Debug)] +pub struct ProcessBuilder { + /// The program to execute. + program: OsString, + /// A list of arguments to pass to the program. + args: Vec, + /// Any environment variables that should be set for the program. + env: HashMap>, + /// Which directory to run the program from. + cwd: Option, + /// The `make` jobserver. See the [jobserver crate][jobserver_docs] for + /// more information. + /// + /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ + jobserver: Option, +} + +impl fmt::Display for ProcessBuilder { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "`{}", self.program.to_string_lossy())?; + + for arg in &self.args { + write!(f, " {}", escape(arg.to_string_lossy()))?; + } + + write!(f, "`") + } +} + +impl ProcessBuilder { + /// (chainable) Set the executable for the process. + pub fn program>(&mut self, program: T) -> &mut ProcessBuilder { + self.program = program.as_ref().to_os_string(); + self + } + + /// (chainable) Add an arg to the args list. + pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { + self.args.push(arg.as_ref().to_os_string()); + self + } + + /// (chainable) Add many args to the args list. + pub fn args>(&mut self, arguments: &[T]) -> &mut ProcessBuilder { + self.args.extend(arguments.iter().map(|t| { + t.as_ref().to_os_string() + })); + self + } + + /// (chainable) Replace args with new args list + pub fn args_replace>(&mut self, arguments: &[T]) -> &mut ProcessBuilder { + self.args = arguments.iter().map(|t| { + t.as_ref().to_os_string() + }).collect(); + self + } + + /// (chainable) Set the current working directory of the process + pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { + self.cwd = Some(path.as_ref().to_os_string()); + self + } + + /// (chainable) Set an environment variable for the process. + pub fn env>(&mut self, key: &str, + val: T) -> &mut ProcessBuilder { + self.env.insert(key.to_string(), Some(val.as_ref().to_os_string())); + self + } + + /// (chainable) Unset an environment variable for the process. + pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { + self.env.insert(key.to_string(), None); + self + } + + /// Get the executable name. 
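+    ///
+    /// A small illustrative sketch (hypothetical values, not from the
+    /// original source):
+    ///
+    /// ```ignore
+    /// let mut p = process("rustc");
+    /// p.arg("-vV");
+    /// assert_eq!(p.get_program().to_string_lossy(), "rustc");
+    /// ```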
+    pub fn get_program(&self) -> &OsString {
+        &self.program
+    }
+
+    /// Get the program arguments.
+    pub fn get_args(&self) -> &[OsString] {
+        &self.args
+    }
+
+    /// Get the current working directory for the process.
+    pub fn get_cwd(&self) -> Option<&Path> {
+        self.cwd.as_ref().map(Path::new)
+    }
+
+    /// Get an environment variable as the process will see it (will inherit from environment
+    /// unless explicitly unset).
+    pub fn get_env(&self, var: &str) -> Option<OsString> {
+        self.env.get(var).cloned().or_else(|| Some(env::var_os(var)))
+            .and_then(|s| s)
+    }
+
+    /// Get all environment variables explicitly set or unset for the process (not inherited
+    /// vars).
+    pub fn get_envs(&self) -> &HashMap<String, Option<OsString>> { &self.env }
+
+    /// Set the `make` jobserver. See the [jobserver crate][jobserver_docs] for
+    /// more information.
+    ///
+    /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/
+    pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self {
+        self.jobserver = Some(jobserver.clone());
+        self
+    }
+
+    /// Run the process, waiting for completion, and mapping non-success exit codes to an error.
+    pub fn exec(&self) -> CargoResult<()> {
+        let mut command = self.build_command();
+        let exit = command.status().chain_err(|| {
+            CargoErrorKind::ProcessErrorKind(
+                process_error(&format!("could not execute process `{}`",
+                                       self.debug_string()), None, None))
+        })?;
+
+        if exit.success() {
+            Ok(())
+        } else {
+            Err(CargoErrorKind::ProcessErrorKind(process_error(
+                &format!("process didn't exit successfully: `{}`", self.debug_string()),
+                Some(&exit), None)).into())
+        }
+    }
+
+    /// On unix, executes the process using the unix syscall `execvp`, which will block this
+    /// process, and will only return if there is an error. On windows this is a synonym for
+    /// `exec`.
+    #[cfg(unix)]
+    pub fn exec_replace(&self) -> CargoResult<()> {
+        use std::os::unix::process::CommandExt;
+
+        let mut command = self.build_command();
+        let error = command.exec();
+        Err(CargoError::with_chain(error,
+            CargoErrorKind::ProcessErrorKind(process_error(
+                &format!("could not execute process `{}`", self.debug_string()), None, None))))
+    }
+
+    /// On unix, executes the process using the unix syscall `execvp`, which will block this
+    /// process, and will only return if there is an error. On windows this is a synonym for
+    /// `exec`.
+    #[cfg(windows)]
+    pub fn exec_replace(&self) -> CargoResult<()> {
+        self.exec()
+    }
+
+    /// Execute the process, returning the stdio output, or an error if non-zero exit status.
+    pub fn exec_with_output(&self) -> CargoResult<Output> {
+        let mut command = self.build_command();
+
+        let output = command.output().chain_err(|| {
+            CargoErrorKind::ProcessErrorKind(
+                process_error(
+                    &format!("could not execute process `{}`", self.debug_string()),
+                    None, None))
+        })?;
+
+        if output.status.success() {
+            Ok(output)
+        } else {
+            Err(CargoErrorKind::ProcessErrorKind(process_error(
+                &format!("process didn't exit successfully: `{}`", self.debug_string()),
+                Some(&output.status), Some(&output))).into())
+        }
+    }
+
+    /// Execute a command, passing each line of stdout and stderr to the supplied callbacks, which
+    /// can mutate the string data.
+    ///
+    /// If any invocation of these functions returns an error, it will be propagated.
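+    /// Each callback receives complete lines: output is buffered and split
+    /// on newlines internally, and any trailing partial line is delivered
+    /// once its stream reaches EOF.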
+ /// + /// Optionally, output can be passed to errors using `print_output` + pub fn exec_with_streaming(&self, + on_stdout_line: &mut FnMut(&str) -> CargoResult<()>, + on_stderr_line: &mut FnMut(&str) -> CargoResult<()>, + print_output: bool) + -> CargoResult { + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + + let mut cmd = self.build_command(); + cmd.stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .stdin(Stdio::null()); + + let mut callback_error = None; + let status = (|| { + let mut child = cmd.spawn()?; + let out = child.stdout.take().unwrap(); + let err = child.stderr.take().unwrap(); + read2(out, err, &mut |is_out, data, eof| { + let idx = if eof { + data.len() + } else { + match data.iter().rposition(|b| *b == b'\n') { + Some(i) => i + 1, + None => return, + } + }; + let data = data.drain(..idx); + let dst = if is_out {&mut stdout} else {&mut stderr}; + let start = dst.len(); + dst.extend(data); + for line in String::from_utf8_lossy(&dst[start..]).lines() { + if callback_error.is_some() { break } + let callback_result = if is_out { + on_stdout_line(line) + } else { + on_stderr_line(line) + }; + if let Err(e) = callback_result { + callback_error = Some(e); + } + } + })?; + child.wait() + })().chain_err(|| { + CargoErrorKind::ProcessErrorKind( + process_error(&format!("could not execute process `{}`", + self.debug_string()), + None, None)) + })?; + let output = Output { + stdout: stdout, + stderr: stderr, + status: status, + }; + + { + let to_print = if print_output { + Some(&output) + } else { + None + }; + if !output.status.success() { + return Err(CargoErrorKind::ProcessErrorKind(process_error( + &format!("process didn't exit successfully: `{}`", self.debug_string()), + Some(&output.status), to_print)).into()) + } else if let Some(e) = callback_error { + return Err(CargoError::with_chain(e, + CargoErrorKind::ProcessErrorKind(process_error( + &format!("failed to parse process output: `{}`", self.debug_string()), + Some(&output.status), to_print)))) + } + } + + Ok(output) + } + + /// Converts ProcessBuilder into a `std::process::Command`, and handles the jobserver if + /// present. + pub fn build_command(&self) -> Command { + let mut command = Command::new(&self.program); + if let Some(cwd) = self.get_cwd() { + command.current_dir(cwd); + } + for arg in &self.args { + command.arg(arg); + } + for (k, v) in &self.env { + match *v { + Some(ref v) => { command.env(k, v); } + None => { command.env_remove(k); } + } + } + if let Some(ref c) = self.jobserver { + c.configure(&mut command); + } + command + } + + /// Get the command line for the process as a string. + fn debug_string(&self) -> String { + let mut program = format!("{}", self.program.to_string_lossy()); + for arg in &self.args { + program.push(' '); + program.push_str(&format!("{}", arg.to_string_lossy())); + } + program + } +} + +/// A helper function to create a ProcessBuilder. 
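+///
+/// For example (illustrative only; assumes an `echo` binary on `PATH`):
+///
+/// ```ignore
+/// let output = process("echo").arg("hello").exec_with_output()?;
+/// assert!(output.status.success());
+/// ```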
+pub fn process>(cmd: T) -> ProcessBuilder { + ProcessBuilder { + program: cmd.as_ref().to_os_string(), + args: Vec::new(), + cwd: None, + env: HashMap::new(), + jobserver: None, + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/profile.rs b/collector/compile-benchmarks/cargo/src/cargo/util/profile.rs new file mode 100644 index 000000000..da90566f1 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/profile.rs @@ -0,0 +1,71 @@ +use std::env; +use std::fmt; +use std::mem; +use std::time; +use std::iter::repeat; +use std::cell::RefCell; + +thread_local!(static PROFILE_STACK: RefCell> = RefCell::new(Vec::new())); +thread_local!(static MESSAGES: RefCell> = RefCell::new(Vec::new())); + +type Message = (usize, u64, String); + +pub struct Profiler { + desc: String, +} + +fn enabled_level() -> Option { + env::var("CARGO_PROFILE").ok().and_then(|s| s.parse().ok()) +} + +pub fn start(desc: T) -> Profiler { + if enabled_level().is_none() { return Profiler { desc: String::new() } } + + PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::Instant::now())); + + Profiler { + desc: desc.to_string(), + } +} + +impl Drop for Profiler { + fn drop(&mut self) { + let enabled = match enabled_level() { + Some(i) => i, + None => return, + }; + + let start = PROFILE_STACK.with(|stack| stack.borrow_mut().pop().unwrap()); + let duration = start.elapsed(); + let duration_ms = duration.as_secs() * 1000 + u64::from(duration.subsec_nanos() / 1_000_000); + + let stack_len = PROFILE_STACK.with(|stack| stack.borrow().len()); + if stack_len == 0 { + fn print(lvl: usize, msgs: &[Message], enabled: usize) { + if lvl > enabled { return } + let mut last = 0; + for (i, &(l, time, ref msg)) in msgs.iter().enumerate() { + if l != lvl { continue } + println!("{} {:6}ms - {}", + repeat(" ").take(lvl + 1).collect::(), + time, msg); + + print(lvl + 1, &msgs[last..i], enabled); + last = i; + } + + } + MESSAGES.with(|msgs_rc| { + let mut msgs = msgs_rc.borrow_mut(); + msgs.push((0, duration_ms, + mem::replace(&mut self.desc, String::new()))); + print(0, &msgs, enabled); + }); + } else { + MESSAGES.with(|msgs| { + let msg = mem::replace(&mut self.desc, String::new()); + msgs.borrow_mut().push((stack_len, duration_ms, msg)); + }); + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/read2.rs b/collector/compile-benchmarks/cargo/src/cargo/util/read2.rs new file mode 100644 index 000000000..b3aa7d8b2 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/read2.rs @@ -0,0 +1,177 @@ +pub use self::imp::read2; + +#[cfg(unix)] +mod imp { + use std::io::prelude::*; + use std::io; + use std::mem; + use std::os::unix::prelude::*; + use std::process::{ChildStdout, ChildStderr}; + use libc; + + pub fn read2(mut out_pipe: ChildStdout, + mut err_pipe: ChildStderr, + data: &mut FnMut(bool, &mut Vec, bool)) -> io::Result<()> { + unsafe { + libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + } + + let mut out_done = false; + let mut err_done = false; + let mut out = Vec::new(); + let mut err = Vec::new(); + + let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; + fds[0].fd = out_pipe.as_raw_fd(); + fds[0].events = libc::POLLIN; + fds[1].fd = err_pipe.as_raw_fd(); + fds[1].events = libc::POLLIN; + loop { + // wait for either pipe to become readable using `select` + let r = unsafe { libc::poll(fds.as_mut_ptr(), 2, -1) }; + if r == -1 { + let err = io::Error::last_os_error(); + if 
err.kind() == io::ErrorKind::Interrupted { + continue + } + return Err(err) + } + + // Read as much as we can from each pipe, ignoring EWOULDBLOCK or + // EAGAIN. If we hit EOF, then this will happen because the underlying + // reader will return Ok(0), in which case we'll see `Ok` ourselves. In + // this case we flip the other fd back into blocking mode and read + // whatever's leftover on that file descriptor. + let handle = |res: io::Result<_>| { + match res { + Ok(_) => Ok(true), + Err(e) => { + if e.kind() == io::ErrorKind::WouldBlock { + Ok(false) + } else { + Err(e) + } + } + } + }; + if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? { + out_done = true; + } + data(true, &mut out, out_done); + if !err_done && fds[1].revents != 0 && handle(err_pipe.read_to_end(&mut err))? { + err_done = true; + } + data(false, &mut err, err_done); + + if out_done && err_done { + return Ok(()) + } + } + } +} + +#[cfg(windows)] +mod imp { + extern crate miow; + extern crate winapi; + + use std::io; + use std::os::windows::prelude::*; + use std::process::{ChildStdout, ChildStderr}; + use std::slice; + + use self::miow::iocp::{CompletionPort, CompletionStatus}; + use self::miow::pipe::NamedPipe; + use self::miow::Overlapped; + use self::winapi::ERROR_BROKEN_PIPE; + + struct Pipe<'a> { + dst: &'a mut Vec, + overlapped: Overlapped, + pipe: NamedPipe, + done: bool, + } + + pub fn read2(out_pipe: ChildStdout, + err_pipe: ChildStderr, + data: &mut FnMut(bool, &mut Vec, bool)) -> io::Result<()> { + let mut out = Vec::new(); + let mut err = Vec::new(); + + let port = CompletionPort::new(1)?; + port.add_handle(0, &out_pipe)?; + port.add_handle(1, &err_pipe)?; + + unsafe { + let mut out_pipe = Pipe::new(out_pipe, &mut out); + let mut err_pipe = Pipe::new(err_pipe, &mut err); + + out_pipe.read()?; + err_pipe.read()?; + + let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; + + while !out_pipe.done || !err_pipe.done { + for status in port.get_many(&mut status, None)? 
{ + if status.token() == 0 { + out_pipe.complete(status); + data(true, out_pipe.dst, out_pipe.done); + out_pipe.read()?; + } else { + err_pipe.complete(status); + data(false, err_pipe.dst, err_pipe.done); + err_pipe.read()?; + } + } + } + + Ok(()) + } + } + + impl<'a> Pipe<'a> { + unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { + Pipe { + dst: dst, + pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), + overlapped: Overlapped::zero(), + done: false, + } + } + + unsafe fn read(&mut self) -> io::Result<()> { + let dst = slice_to_end(self.dst); + match self.pipe.read_overlapped(dst, self.overlapped.raw()) { + Ok(_) => Ok(()), + Err(e) => { + if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { + self.done = true; + Ok(()) + } else { + Err(e) + } + } + } + } + + unsafe fn complete(&mut self, status: &CompletionStatus) { + let prev = self.dst.len(); + self.dst.set_len(prev + status.bytes_transferred() as usize); + if status.bytes_transferred() == 0 { + self.done = true; + } + } + } + + unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { + if v.capacity() == 0 { + v.reserve(16); + } + if v.capacity() == v.len() { + v.reserve(1); + } + slice::from_raw_parts_mut(v.as_mut_ptr().offset(v.len() as isize), + v.capacity() - v.len()) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/rustc.rs b/collector/compile-benchmarks/cargo/src/cargo/util/rustc.rs new file mode 100644 index 000000000..a9b65c959 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/rustc.rs @@ -0,0 +1,62 @@ +use std::path::PathBuf; + +use util::{self, CargoResult, internal, ProcessBuilder}; + +/// Information on the `rustc` executable +#[derive(Debug)] +pub struct Rustc { + /// The location of the exe + pub path: PathBuf, + /// An optional program that will be passed the path of the rust exe as its first argument, and + /// rustc args following this. + pub wrapper: Option, + /// Verbose version information (the output of `rustc -vV`) + pub verbose_version: String, + /// The host triple (arch-platform-OS), this comes from verbose_version. + pub host: String, +} + +impl Rustc { + /// Run the compiler at `path` to learn various pieces of information about + /// it, with an optional wrapper. + /// + /// If successful this function returns a description of the compiler along + /// with a list of its capabilities. 
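+    ///
+    /// A minimal sketch of the intended use (assumes `rustc` is on `PATH`):
+    ///
+    /// ```ignore
+    /// let rustc = Rustc::new(PathBuf::from("rustc"), None)?;
+    /// println!("host: {}", rustc.host); // e.g. x86_64-unknown-linux-gnu
+    /// ```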
+ pub fn new(path: PathBuf, wrapper: Option) -> CargoResult { + let mut cmd = util::process(&path); + cmd.arg("-vV"); + + let output = cmd.exec_with_output()?; + + let verbose_version = String::from_utf8(output.stdout).map_err(|_| { + internal("rustc -v didn't return utf8 output") + })?; + + let host = { + let triple = verbose_version.lines().find(|l| { + l.starts_with("host: ") + }).map(|l| &l[6..]).ok_or_else(|| internal("rustc -v didn't have a line for `host:`"))?; + triple.to_string() + }; + + Ok(Rustc { + path: path, + wrapper: wrapper, + verbose_version: verbose_version, + host: host, + }) + } + + /// Get a process builder set up to use the found rustc version, with a wrapper if Some + pub fn process(&self) -> ProcessBuilder { + if let Some(ref wrapper) = self.wrapper { + let mut cmd = util::process(wrapper); + { + cmd.arg(&self.path); + } + cmd + } else { + util::process(&self.path) + } + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/sha256.rs b/collector/compile-benchmarks/cargo/src/cargo/util/sha256.rs new file mode 100644 index 000000000..c805d7f1a --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/sha256.rs @@ -0,0 +1,23 @@ +extern crate crypto_hash; +use self::crypto_hash::{Hasher,Algorithm}; +use std::io::Write; + +pub struct Sha256(Hasher); + +impl Sha256 { + pub fn new() -> Sha256 { + let hasher = Hasher::new(Algorithm::SHA256); + Sha256(hasher) + } + + pub fn update(&mut self, bytes: &[u8]) { + let _ = self.0.write_all(bytes); + } + + pub fn finish(&mut self) -> [u8; 32] { + let mut ret = [0u8; 32]; + let data = self.0.finish(); + ret.copy_from_slice(&data[..]); + ret + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/to_semver.rs b/collector/compile-benchmarks/cargo/src/cargo/util/to_semver.rs new file mode 100644 index 000000000..ad6aff16e --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/to_semver.rs @@ -0,0 +1,30 @@ +use semver::Version; + +pub trait ToSemver { + fn to_semver(self) -> Result; +} + +impl ToSemver for Version { + fn to_semver(self) -> Result { Ok(self) } +} + +impl<'a> ToSemver for &'a str { + fn to_semver(self) -> Result { + match Version::parse(self) { + Ok(v) => Ok(v), + Err(..) 
=> Err(format!("cannot parse '{}' as a semver", self)), + } + } +} + +impl<'a> ToSemver for &'a String { + fn to_semver(self) -> Result { + (**self).to_semver() + } +} + +impl<'a> ToSemver for &'a Version { + fn to_semver(self) -> Result { + Ok(self.clone()) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/to_url.rs b/collector/compile-benchmarks/cargo/src/cargo/util/to_url.rs new file mode 100644 index 000000000..f6a4d23a5 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/to_url.rs @@ -0,0 +1,27 @@ +use std::path::Path; + +use url::Url; + +use util::CargoResult; + +/// A type that can be converted to a Url +pub trait ToUrl { + /// Performs the conversion + fn to_url(self) -> CargoResult; +} + +impl<'a> ToUrl for &'a str { + fn to_url(self) -> CargoResult { + Url::parse(self).map_err(|s| { + format!("invalid url `{}`: {}", self, s).into() + }) + } +} + +impl<'a> ToUrl for &'a Path { + fn to_url(self) -> CargoResult { + Url::from_file_path(self).map_err(|()| { + format!("invalid path url `{}`", self.display()).into() + }) + } +} diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/toml/mod.rs b/collector/compile-benchmarks/cargo/src/cargo/util/toml/mod.rs new file mode 100644 index 000000000..32122444d --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/cargo/util/toml/mod.rs @@ -0,0 +1,1085 @@ +use std::collections::{HashMap, BTreeMap, HashSet, BTreeSet}; +use std::fmt; +use std::fs; +use std::path::{Path, PathBuf}; +use std::rc::Rc; +use std::str; + +use semver::{self, VersionReq}; +use serde::ser; +use serde::de::{self, Deserialize}; +use serde_ignored; +use toml; +use url::Url; + +use core::{SourceId, Profiles, PackageIdSpec, GitReference, WorkspaceConfig, WorkspaceRootConfig}; +use core::{Summary, Manifest, Target, Dependency, PackageId}; +use core::{EitherManifest, VirtualManifest, Features}; +use core::dependency::{Kind, Platform}; +use core::manifest::{LibKind, Profile, ManifestMetadata}; +use sources::CRATES_IO; +use util::paths; +use util::{self, ToUrl, Config}; +use util::errors::{CargoError, CargoResult, CargoResultExt}; + +mod targets; +use self::targets::targets; + +pub fn read_manifest(path: &Path, source_id: &SourceId, config: &Config) + -> CargoResult<(EitherManifest, Vec)> { + trace!("read_manifest; path={}; source-id={}", path.display(), source_id); + let contents = paths::read(path)?; + + do_read_manifest(&contents, path, source_id, config).chain_err(|| { + format!("failed to parse manifest at `{}`", path.display()) + }) +} + +fn do_read_manifest(contents: &str, + manifest_file: &Path, + source_id: &SourceId, + config: &Config) + -> CargoResult<(EitherManifest, Vec)> { + let package_root = manifest_file.parent().unwrap(); + + let toml = { + let pretty_filename = + util::without_prefix(manifest_file, config.cwd()).unwrap_or(manifest_file); + parse(contents, pretty_filename, config)? 
+ }; + + let mut unused = BTreeSet::new(); + let manifest: TomlManifest = serde_ignored::deserialize(toml, |path| { + let mut key = String::new(); + stringify(&mut key, &path); + unused.insert(key); + })?; + + let manifest = Rc::new(manifest); + return match TomlManifest::to_real_manifest(&manifest, + source_id, + package_root, + config) { + Ok((mut manifest, paths)) => { + for key in unused { + manifest.add_warning(format!("unused manifest key: {}", key)); + } + if !manifest.targets().iter().any(|t| !t.is_custom_build()) { + bail!("no targets specified in the manifest\n \ + either src/lib.rs, src/main.rs, a [lib] section, or \ + [[bin]] section must be present") + } + Ok((EitherManifest::Real(manifest), paths)) + } + Err(e) => { + match TomlManifest::to_virtual_manifest(&manifest, + source_id, + package_root, + config) { + Ok((m, paths)) => Ok((EitherManifest::Virtual(m), paths)), + Err(..) => Err(e), + } + } + }; + + fn stringify(dst: &mut String, path: &serde_ignored::Path) { + use serde_ignored::Path; + + match *path { + Path::Root => {} + Path::Seq { parent, index } => { + stringify(dst, parent); + if !dst.is_empty() { + dst.push('.'); + } + dst.push_str(&index.to_string()); + } + Path::Map { parent, ref key } => { + stringify(dst, parent); + if !dst.is_empty() { + dst.push('.'); + } + dst.push_str(key); + } + Path::Some { parent } | + Path::NewtypeVariant { parent } | + Path::NewtypeStruct { parent } => stringify(dst, parent), + } + } +} + +pub fn parse(toml: &str, + file: &Path, + config: &Config) -> CargoResult { + let first_error = match toml.parse() { + Ok(ret) => return Ok(ret), + Err(e) => e, + }; + + let mut second_parser = toml::de::Deserializer::new(toml); + second_parser.set_require_newline_after_table(false); + if let Ok(ret) = toml::Value::deserialize(&mut second_parser) { + let msg = format!("\ +TOML file found which contains invalid syntax and will soon not parse +at `{}`. + +The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is +invalid), but this file has a table header which does not have a newline after +it. 
A newline needs to be added and this warning will soon become a hard error +in the future.", file.display()); + config.shell().warn(&msg)?; + return Ok(ret) + } + + Err(first_error).chain_err(|| { + "could not parse input as TOML" + }) +} + +type TomlLibTarget = TomlTarget; +type TomlBinTarget = TomlTarget; +type TomlExampleTarget = TomlTarget; +type TomlTestTarget = TomlTarget; +type TomlBenchTarget = TomlTarget; + +#[derive(Debug, Serialize)] +#[serde(untagged)] +pub enum TomlDependency { + Simple(String), + Detailed(DetailedTomlDependency) +} + +impl<'de> de::Deserialize<'de> for TomlDependency { + fn deserialize(deserializer: D) -> Result + where D: de::Deserializer<'de> + { + struct TomlDependencyVisitor; + + impl<'de> de::Visitor<'de> for TomlDependencyVisitor { + type Value = TomlDependency; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a version string like \"0.9.8\" or a \ + detailed dependency like { version = \"0.9.8\" }") + } + + fn visit_str(self, s: &str) -> Result + where E: de::Error + { + Ok(TomlDependency::Simple(s.to_owned())) + } + + fn visit_map(self, map: V) -> Result + where V: de::MapAccess<'de> + { + let mvd = de::value::MapAccessDeserializer::new(map); + DetailedTomlDependency::deserialize(mvd).map(TomlDependency::Detailed) + } + } + + deserializer.deserialize_any(TomlDependencyVisitor) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug, Default)] +pub struct DetailedTomlDependency { + version: Option, + path: Option, + git: Option, + branch: Option, + tag: Option, + rev: Option, + features: Option>, + optional: Option, + #[serde(rename = "default-features")] + default_features: Option, + #[serde(rename = "default_features")] + default_features2: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TomlManifest { + package: Option>, + project: Option>, + profile: Option, + lib: Option, + bin: Option>, + example: Option>, + test: Option>, + bench: Option>, + dependencies: Option>, + #[serde(rename = "dev-dependencies")] + dev_dependencies: Option>, + #[serde(rename = "dev_dependencies")] + dev_dependencies2: Option>, + #[serde(rename = "build-dependencies")] + build_dependencies: Option>, + #[serde(rename = "build_dependencies")] + build_dependencies2: Option>, + features: Option>>, + target: Option>, + replace: Option>, + patch: Option>>, + workspace: Option, + badges: Option>>, + #[serde(rename = "cargo-features")] + cargo_features: Option>, +} + +#[derive(Deserialize, Serialize, Clone, Debug, Default)] +pub struct TomlProfiles { + test: Option, + doc: Option, + bench: Option, + dev: Option, + release: Option, +} + +#[derive(Clone, Debug)] +pub struct TomlOptLevel(String); + +impl<'de> de::Deserialize<'de> for TomlOptLevel { + fn deserialize(d: D) -> Result + where D: de::Deserializer<'de> + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = TomlOptLevel; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("an optimization level") + } + + fn visit_i64(self, value: i64) -> Result + where E: de::Error + { + Ok(TomlOptLevel(value.to_string())) + } + + fn visit_str(self, value: &str) -> Result + where E: de::Error + { + if value == "s" || value == "z" { + Ok(TomlOptLevel(value.to_string())) + } else { + Err(E::custom(format!("must be an integer, `z`, or `s`, \ + but found: {}", value))) + } + } + } + + d.deserialize_u32(Visitor) + } +} + +impl ser::Serialize for TomlOptLevel { + fn serialize(&self, serializer: S) -> Result + 
where S: ser::Serializer, + { + match self.0.parse::() { + Ok(n) => n.serialize(serializer), + Err(_) => self.0.serialize(serializer), + } + } +} + +#[derive(Clone, Debug, Serialize)] +#[serde(untagged)] +pub enum U32OrBool { + U32(u32), + Bool(bool), +} + +impl<'de> de::Deserialize<'de> for U32OrBool { + fn deserialize(deserializer: D) -> Result + where D: de::Deserializer<'de> + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = U32OrBool; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a boolean or an integer") + } + + fn visit_i64(self, u: i64) -> Result + where E: de::Error, + { + Ok(U32OrBool::U32(u as u32)) + } + + fn visit_u64(self, u: u64) -> Result + where E: de::Error, + { + Ok(U32OrBool::U32(u as u32)) + } + + fn visit_bool(self, b: bool) -> Result + where E: de::Error, + { + Ok(U32OrBool::Bool(b)) + } + } + + deserializer.deserialize_any(Visitor) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug, Default)] +pub struct TomlProfile { + #[serde(rename = "opt-level")] + opt_level: Option, + lto: Option, + #[serde(rename = "codegen-units")] + codegen_units: Option, + debug: Option, + #[serde(rename = "debug-assertions")] + debug_assertions: Option, + rpath: Option, + panic: Option, + #[serde(rename = "overflow-checks")] + overflow_checks: Option, +} + +#[derive(Clone, Debug, Serialize)] +#[serde(untagged)] +pub enum StringOrBool { + String(String), + Bool(bool), +} + +impl<'de> de::Deserialize<'de> for StringOrBool { + fn deserialize(deserializer: D) -> Result + where D: de::Deserializer<'de> + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = StringOrBool; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a boolean or a string") + } + + fn visit_str(self, s: &str) -> Result + where E: de::Error, + { + Ok(StringOrBool::String(s.to_string())) + } + + fn visit_bool(self, b: bool) -> Result + where E: de::Error, + { + Ok(StringOrBool::Bool(b)) + } + } + + deserializer.deserialize_any(Visitor) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct TomlProject { + name: String, + version: semver::Version, + authors: Option>, + build: Option, + links: Option, + exclude: Option>, + include: Option>, + publish: Option, + workspace: Option, + #[serde(rename = "im-a-teapot")] + im_a_teapot: Option, + + // package metadata + description: Option, + homepage: Option, + documentation: Option, + readme: Option, + keywords: Option>, + categories: Option>, + license: Option, + #[serde(rename = "license-file")] + license_file: Option, + repository: Option, + metadata: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TomlWorkspace { + members: Option>, + exclude: Option>, +} + +impl TomlProject { + pub fn to_package_id(&self, source_id: &SourceId) -> CargoResult { + PackageId::new(&self.name, self.version.clone(), source_id) + } +} + +struct Context<'a, 'b> { + pkgid: Option<&'a PackageId>, + deps: &'a mut Vec, + source_id: &'a SourceId, + nested_paths: &'a mut Vec, + config: &'b Config, + warnings: &'a mut Vec, + platform: Option, + root: &'a Path, +} + +impl TomlManifest { + pub fn prepare_for_publish(&self) -> TomlManifest { + let mut package = self.package.as_ref() + .or_else(|| self.project.as_ref()) + .unwrap() + .clone(); + package.workspace = None; + return TomlManifest { + package: Some(package), + project: None, + profile: self.profile.clone(), + lib: self.lib.clone(), + bin: self.bin.clone(), + 
example: self.example.clone(), + test: self.test.clone(), + bench: self.bench.clone(), + dependencies: map_deps(self.dependencies.as_ref()), + dev_dependencies: map_deps(self.dev_dependencies.as_ref() + .or_else(|| self.dev_dependencies2.as_ref())), + dev_dependencies2: None, + build_dependencies: map_deps(self.build_dependencies.as_ref() + .or_else(|| self.build_dependencies2.as_ref())), + build_dependencies2: None, + features: self.features.clone(), + target: self.target.as_ref().map(|target_map| { + target_map.iter().map(|(k, v)| { + (k.clone(), TomlPlatform { + dependencies: map_deps(v.dependencies.as_ref()), + dev_dependencies: map_deps(v.dev_dependencies.as_ref() + .or_else(|| v.dev_dependencies2.as_ref())), + dev_dependencies2: None, + build_dependencies: map_deps(v.build_dependencies.as_ref() + .or_else(|| v.build_dependencies2.as_ref())), + build_dependencies2: None, + }) + }).collect() + }), + replace: None, + patch: None, + workspace: None, + badges: self.badges.clone(), + cargo_features: self.cargo_features.clone(), + }; + + fn map_deps(deps: Option<&BTreeMap>) + -> Option> + { + let deps = match deps { + Some(deps) => deps, + None => return None + }; + Some(deps.iter().map(|(k, v)| (k.clone(), map_dependency(v))).collect()) + } + + fn map_dependency(dep: &TomlDependency) -> TomlDependency { + match *dep { + TomlDependency::Detailed(ref d) => { + let mut d = d.clone(); + d.path.take(); // path dependencies become crates.io deps + TomlDependency::Detailed(d) + } + TomlDependency::Simple(ref s) => { + TomlDependency::Detailed(DetailedTomlDependency { + version: Some(s.clone()), + ..Default::default() + }) + } + } + } + } + + fn to_real_manifest(me: &Rc, + source_id: &SourceId, + package_root: &Path, + config: &Config) + -> CargoResult<(Manifest, Vec)> { + let mut nested_paths = vec![]; + let mut warnings = vec![]; + let mut errors = vec![]; + + let project = me.project.as_ref().or_else(|| me.package.as_ref()); + let project = project.ok_or_else(|| { + CargoError::from("no `package` section found.") + })?; + + let package_name = project.name.trim(); + if package_name.is_empty() { + bail!("package name cannot be an empty string.") + } + + let pkgid = project.to_package_id(source_id)?; + + // If we have no lib at all, use the inferred lib if available + // If we have a lib with a path, we're done + // If we have a lib with no path, use the inferred lib or_else package name + let targets = targets(me, package_name, package_root, &project.build, + &mut warnings, &mut errors)?; + + if targets.is_empty() { + debug!("manifest has no build targets"); + } + + if let Err(e) = unique_build_targets(&targets, package_root) { + warnings.push(format!("file found to be present in multiple \ + build targets: {}", e)); + } + + let mut deps = Vec::new(); + let replace; + let patch; + + { + + let mut cx = Context { + pkgid: Some(&pkgid), + deps: &mut deps, + source_id: source_id, + nested_paths: &mut nested_paths, + config: config, + warnings: &mut warnings, + platform: None, + root: package_root, + }; + + fn process_dependencies( + cx: &mut Context, + new_deps: Option<&BTreeMap>, + kind: Option) + -> CargoResult<()> + { + let dependencies = match new_deps { + Some(dependencies) => dependencies, + None => return Ok(()) + }; + for (n, v) in dependencies.iter() { + let dep = v.to_dependency(n, cx, kind)?; + cx.deps.push(dep); + } + + Ok(()) + } + + // Collect the deps + process_dependencies(&mut cx, me.dependencies.as_ref(), + None)?; + let dev_deps = me.dev_dependencies.as_ref() + .or_else(|| 
me.dev_dependencies2.as_ref()); + process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?; + let build_deps = me.build_dependencies.as_ref() + .or_else(|| me.build_dependencies2.as_ref()); + process_dependencies(&mut cx, build_deps, Some(Kind::Build))?; + + for (name, platform) in me.target.iter().flat_map(|t| t) { + cx.platform = Some(name.parse()?); + process_dependencies(&mut cx, platform.dependencies.as_ref(), + None)?; + let build_deps = platform.build_dependencies.as_ref() + .or_else(|| platform.build_dependencies2.as_ref()); + process_dependencies(&mut cx, build_deps, Some(Kind::Build))?; + let dev_deps = platform.dev_dependencies.as_ref() + .or_else(|| platform.dev_dependencies2.as_ref()); + process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?; + } + + replace = me.replace(&mut cx)?; + patch = me.patch(&mut cx)?; + } + + { + let mut names_sources = BTreeMap::new(); + for dep in &deps { + let name = dep.name(); + let prev = names_sources.insert(name, dep.source_id()); + if prev.is_some() && prev != Some(dep.source_id()) { + bail!("Dependency '{}' has different source paths depending on the build \ + target. Each dependency must have a single canonical source path \ + irrespective of build target.", name); + } + } + } + + let exclude = project.exclude.clone().unwrap_or_default(); + let include = project.include.clone().unwrap_or_default(); + + let summary = Summary::new(pkgid, deps, me.features.clone() + .unwrap_or_else(BTreeMap::new))?; + let metadata = ManifestMetadata { + description: project.description.clone(), + homepage: project.homepage.clone(), + documentation: project.documentation.clone(), + readme: project.readme.clone(), + authors: project.authors.clone().unwrap_or_default(), + license: project.license.clone(), + license_file: project.license_file.clone(), + repository: project.repository.clone(), + keywords: project.keywords.clone().unwrap_or_default(), + categories: project.categories.clone().unwrap_or_default(), + badges: me.badges.clone().unwrap_or_default(), + }; + + let workspace_config = match (me.workspace.as_ref(), + project.workspace.as_ref()) { + (Some(config), None) => { + WorkspaceConfig::Root( + WorkspaceRootConfig::new(&package_root, &config.members, &config.exclude) + ) + } + (None, root) => { + WorkspaceConfig::Member { root: root.cloned() } + } + (Some(..), Some(..)) => { + bail!("cannot configure both `package.workspace` and \ + `[workspace]`, only one can be specified") + } + }; + let profiles = build_profiles(&me.profile); + let publish = project.publish.unwrap_or(true); + let empty = Vec::new(); + let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); + let features = Features::new(cargo_features, &mut warnings)?; + let mut manifest = Manifest::new(summary, + targets, + exclude, + include, + project.links.clone(), + metadata, + profiles, + publish, + replace, + patch, + workspace_config, + features, + project.im_a_teapot, + Rc::clone(me)); + if project.license_file.is_some() && project.license.is_some() { + manifest.add_warning("only one of `license` or \ + `license-file` is necessary".to_string()); + } + for warning in warnings { + manifest.add_warning(warning); + } + for error in errors { + manifest.add_critical_warning(error); + } + + manifest.feature_gate()?; + + Ok((manifest, nested_paths)) + } + + fn to_virtual_manifest(me: &Rc, + source_id: &SourceId, + root: &Path, + config: &Config) + -> CargoResult<(VirtualManifest, Vec)> { + if me.project.is_some() { + bail!("virtual manifests do not define [project]"); 
+ } + if me.package.is_some() { + bail!("virtual manifests do not define [package]"); + } + if me.lib.is_some() { + bail!("virtual manifests do not specify [lib]"); + } + if me.bin.is_some() { + bail!("virtual manifests do not specify [[bin]]"); + } + if me.example.is_some() { + bail!("virtual manifests do not specify [[example]]"); + } + if me.test.is_some() { + bail!("virtual manifests do not specify [[test]]"); + } + if me.bench.is_some() { + bail!("virtual manifests do not specify [[bench]]"); + } + + let mut nested_paths = Vec::new(); + let mut warnings = Vec::new(); + let mut deps = Vec::new(); + let (replace, patch) = { + let mut cx = Context { + pkgid: None, + deps: &mut deps, + source_id: source_id, + nested_paths: &mut nested_paths, + config: config, + warnings: &mut warnings, + platform: None, + root: root + }; + (me.replace(&mut cx)?, me.patch(&mut cx)?) + }; + let profiles = build_profiles(&me.profile); + let workspace_config = match me.workspace { + Some(ref config) => { + WorkspaceConfig::Root( + WorkspaceRootConfig::new(&root, &config.members, &config.exclude) + ) + } + None => { + bail!("virtual manifests must be configured with [workspace]"); + } + }; + Ok((VirtualManifest::new(replace, patch, workspace_config, profiles), nested_paths)) + } + + fn replace(&self, cx: &mut Context) + -> CargoResult> { + if self.patch.is_some() && self.replace.is_some() { + bail!("cannot specify both [replace] and [patch]"); + } + let mut replace = Vec::new(); + for (spec, replacement) in self.replace.iter().flat_map(|x| x) { + let mut spec = PackageIdSpec::parse(spec).chain_err(|| { + format!("replacements must specify a valid semver \ + version to replace, but `{}` does not", + spec) + })?; + if spec.url().is_none() { + spec.set_url(CRATES_IO.parse().unwrap()); + } + + let version_specified = match *replacement { + TomlDependency::Detailed(ref d) => d.version.is_some(), + TomlDependency::Simple(..) => true, + }; + if version_specified { + bail!("replacements cannot specify a version \ + requirement, but found one for `{}`", spec); + } + + let mut dep = replacement.to_dependency(spec.name(), cx, None)?; + { + let version = spec.version().ok_or_else(|| { + CargoError::from(format!("replacements must specify a version \ + to replace, but `{}` does not", + spec)) + })?; + dep.set_version_req(VersionReq::exact(version)); + } + replace.push((spec, dep)); + } + Ok(replace) + } + + fn patch(&self, cx: &mut Context) + -> CargoResult>> { + let mut patch = HashMap::new(); + for (url, deps) in self.patch.iter().flat_map(|x| x) { + let url = match &url[..] { + "crates-io" => CRATES_IO.parse().unwrap(), + _ => url.to_url()?, + }; + patch.insert(url, deps.iter().map(|(name, dep)| { + dep.to_dependency(name, cx, None) + }).collect::>>()?); + } + Ok(patch) + } + + fn maybe_custom_build(&self, + build: &Option, + package_root: &Path) + -> Option { + let build_rs = package_root.join("build.rs"); + match *build { + Some(StringOrBool::Bool(false)) => None, // explicitly no build script + Some(StringOrBool::Bool(true)) => Some(build_rs.into()), + Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)), + None => { + match fs::metadata(&build_rs) { + // If there is a build.rs file next to the Cargo.toml, assume it is + // a build script + Ok(ref e) if e.is_file() => Some(build_rs.into()), + Ok(_) | Err(_) => None, + } + } + } + } +} + +/// Will check a list of build targets, and make sure the target names are unique within a vector. +/// If not, the name of the offending build target is returned. 
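+/// For example, a `[lib]` section and a `[[bin]]` section that both point
+/// at `src/lib.rs` would be reported here as duplicates.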
+fn unique_build_targets(targets: &[Target], package_root: &Path) -> Result<(), String> { + let mut seen = HashSet::new(); + for v in targets.iter().map(|e| package_root.join(e.src_path())) { + if !seen.insert(v.clone()) { + return Err(v.display().to_string()); + } + } + Ok(()) +} + +impl TomlDependency { + fn to_dependency(&self, + name: &str, + cx: &mut Context, + kind: Option) + -> CargoResult { + let details = match *self { + TomlDependency::Simple(ref version) => DetailedTomlDependency { + version: Some(version.clone()), + .. Default::default() + }, + TomlDependency::Detailed(ref details) => details.clone(), + }; + + if details.version.is_none() && details.path.is_none() && + details.git.is_none() { + let msg = format!("dependency ({}) specified without \ + providing a local path, Git repository, or \ + version to use. This will be considered an \ + error in future versions", name); + cx.warnings.push(msg); + } + + if details.git.is_none() { + let git_only_keys = [ + (&details.branch, "branch"), + (&details.tag, "tag"), + (&details.rev, "rev") + ]; + + for &(key, key_name) in &git_only_keys { + if key.is_some() { + let msg = format!("key `{}` is ignored for dependency ({}). \ + This will be considered an error in future versions", + key_name, name); + cx.warnings.push(msg) + } + } + } + + let new_source_id = match (details.git.as_ref(), details.path.as_ref()) { + (Some(git), maybe_path) => { + if maybe_path.is_some() { + let msg = format!("dependency ({}) specification is ambiguous. \ + Only one of `git` or `path` is allowed. \ + This will be considered an error in future versions", name); + cx.warnings.push(msg) + } + + let n_details = [&details.branch, &details.tag, &details.rev] + .iter() + .filter(|d| d.is_some()) + .count(); + + if n_details > 1 { + let msg = format!("dependency ({}) specification is ambiguous. \ + Only one of `branch`, `tag` or `rev` is allowed. \ + This will be considered an error in future versions", name); + cx.warnings.push(msg) + } + + let reference = details.branch.clone().map(GitReference::Branch) + .or_else(|| details.tag.clone().map(GitReference::Tag)) + .or_else(|| details.rev.clone().map(GitReference::Rev)) + .unwrap_or_else(|| GitReference::Branch("master".to_string())); + let loc = git.to_url()?; + SourceId::for_git(&loc, reference)? + }, + (None, Some(path)) => { + cx.nested_paths.push(PathBuf::from(path)); + // If the source id for the package we're parsing is a path + // source, then we normalize the path here to get rid of + // components like `..`. + // + // The purpose of this is to get a canonical id for the package + // that we're depending on to ensure that builds of this package + // always end up hashing to the same value no matter where it's + // built from. + if cx.source_id.is_path() { + let path = cx.root.join(path); + let path = util::normalize_path(&path); + SourceId::for_path(&path)? + } else { + cx.source_id.clone() + } + }, + (None, None) => SourceId::crates_io(cx.config)?, + }; + + let version = details.version.as_ref().map(|v| &v[..]); + let mut dep = match cx.pkgid { + Some(id) => { + Dependency::parse(name, version, &new_source_id, + id, cx.config)? 
+            }
+            None => Dependency::parse_no_deprecated(name, version, &new_source_id)?,
+        };
+        dep.set_features(details.features.unwrap_or_default())
+           .set_default_features(details.default_features
+                                        .or(details.default_features2)
+                                        .unwrap_or(true))
+           .set_optional(details.optional.unwrap_or(false))
+           .set_platform(cx.platform.clone());
+        if let Some(kind) = kind {
+            dep.set_kind(kind);
+        }
+        Ok(dep)
+    }
+}
+
+#[derive(Default, Serialize, Deserialize, Debug, Clone)]
+struct TomlTarget {
+    name: Option<String>,
+
+    // The intention was to only accept `crate-type` here but historical
+    // versions of Cargo also accepted `crate_type`, so look for both.
+    #[serde(rename = "crate-type")]
+    crate_type: Option<Vec<String>>,
+    #[serde(rename = "crate_type")]
+    crate_type2: Option<Vec<String>>,
+
+    path: Option<PathValue>,
+    test: Option<bool>,
+    doctest: Option<bool>,
+    bench: Option<bool>,
+    doc: Option<bool>,
+    plugin: Option<bool>,
+    #[serde(rename = "proc-macro")]
+    proc_macro: Option<bool>,
+    #[serde(rename = "proc_macro")]
+    proc_macro2: Option<bool>,
+    harness: Option<bool>,
+    #[serde(rename = "required-features")]
+    required_features: Option<Vec<String>>,
+}
+
+#[derive(Clone)]
+struct PathValue(PathBuf);
+
+impl<'de> de::Deserialize<'de> for PathValue {
+    fn deserialize<D>(deserializer: D) -> Result<PathValue, D::Error>
+        where D: de::Deserializer<'de>
+    {
+        Ok(PathValue(String::deserialize(deserializer)?.into()))
+    }
+}
+
+impl ser::Serialize for PathValue {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        where S: ser::Serializer,
+    {
+        self.0.serialize(serializer)
+    }
+}
+
+/// Corresponds to a `target` entry, but `TomlTarget` is already used.
+#[derive(Serialize, Deserialize, Debug)]
+struct TomlPlatform {
+    dependencies: Option<HashMap<String, TomlDependency>>,
+    #[serde(rename = "build-dependencies")]
+    build_dependencies: Option<HashMap<String, TomlDependency>>,
+    #[serde(rename = "build_dependencies")]
+    build_dependencies2: Option<HashMap<String, TomlDependency>>,
+    #[serde(rename = "dev-dependencies")]
+    dev_dependencies: Option<HashMap<String, TomlDependency>>,
+    #[serde(rename = "dev_dependencies")]
+    dev_dependencies2: Option<HashMap<String, TomlDependency>>,
+}
+
+impl TomlTarget {
+    fn new() -> TomlTarget {
+        TomlTarget::default()
+    }
+
+    fn name(&self) -> String {
+        match self.name {
+            Some(ref name) => name.clone(),
+            None => panic!("target name is required")
+        }
+    }
+
+    fn proc_macro(&self) -> Option<bool> {
+        self.proc_macro.or(self.proc_macro2)
+    }
+
+    fn crate_types(&self) -> Option<&Vec<String>> {
+        self.crate_type.as_ref().or_else(|| self.crate_type2.as_ref())
+    }
+}
+
+impl fmt::Debug for PathValue {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+fn build_profiles(profiles: &Option<TomlProfiles>) -> Profiles {
+    let profiles = profiles.as_ref();
+    let mut profiles = Profiles {
+        release: merge(Profile::default_release(),
+                       profiles.and_then(|p| p.release.as_ref())),
+        dev: merge(Profile::default_dev(),
+                   profiles.and_then(|p| p.dev.as_ref())),
+        test: merge(Profile::default_test(),
+                    profiles.and_then(|p| p.test.as_ref())),
+        test_deps: merge(Profile::default_dev(),
+                         profiles.and_then(|p| p.dev.as_ref())),
+        bench: merge(Profile::default_bench(),
+                     profiles.and_then(|p| p.bench.as_ref())),
+        bench_deps: merge(Profile::default_release(),
+                          profiles.and_then(|p| p.release.as_ref())),
+        doc: merge(Profile::default_doc(),
+                   profiles.and_then(|p| p.doc.as_ref())),
+        custom_build: Profile::default_custom_build(),
+        check: merge(Profile::default_check(),
+                     profiles.and_then(|p| p.dev.as_ref())),
+        doctest: Profile::default_doctest(),
+    };
+    // The test/bench targets cannot have panic=abort because they'll all get
+    // compiled with --test which requires the unwind runtime currently
+    profiles.test.panic = None;
+    profiles.bench.panic = None;
+    profiles.test_deps.panic = None;
+    profiles.bench_deps.panic = None;
+    return profiles;
+
+    fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile {
+        let &TomlProfile {
+            ref opt_level, lto, codegen_units, ref debug, debug_assertions, rpath,
+            ref panic, ref overflow_checks,
+        } = match toml {
+            Some(toml) => toml,
+            None => return profile,
+        };
+        let debug = match *debug {
+            Some(U32OrBool::U32(debug)) => Some(Some(debug)),
+            Some(U32OrBool::Bool(true)) => Some(Some(2)),
+            Some(U32OrBool::Bool(false)) => Some(None),
+            None => None,
+        };
+        Profile {
+            opt_level: opt_level.clone().unwrap_or(TomlOptLevel(profile.opt_level)).0,
+            lto: lto.unwrap_or(profile.lto),
+            codegen_units: codegen_units,
+            rustc_args: None,
+            rustdoc_args: None,
+            debuginfo: debug.unwrap_or(profile.debuginfo),
+            debug_assertions: debug_assertions.unwrap_or(profile.debug_assertions),
+            overflow_checks: overflow_checks.unwrap_or(profile.overflow_checks),
+            rpath: rpath.unwrap_or(profile.rpath),
+            test: profile.test,
+            doc: profile.doc,
+            run_custom_build: profile.run_custom_build,
+            check: profile.check,
+            panic: panic.clone().or(profile.panic),
+        }
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/toml/targets.rs b/collector/compile-benchmarks/cargo/src/cargo/util/toml/targets.rs
new file mode 100644
index 000000000..65393527a
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/util/toml/targets.rs
@@ -0,0 +1,493 @@
+//! This module implements Cargo conventions for directory layout:
+//!
+//!  * `src/lib.rs` is a library
+//!  * `src/main.rs` is a binary
+//!  * `src/bin/*.rs` are binaries
+//!  * `examples/*.rs` are examples
+//!  * `tests/*.rs` are integration tests
+//!  * `benches/*.rs` are benchmarks
+//!
+//! It is a bit tricky because we need to match explicit information from `Cargo.toml`
+//! with implicit info in directory layout.
+
+use std::path::{Path, PathBuf};
+use std::fs::{self, DirEntry};
+use std::collections::HashSet;
+
+use core::Target;
+use ops::is_bad_artifact_name;
+use util::errors::CargoResult;
+use super::{TomlTarget, LibKind, PathValue, TomlManifest, StringOrBool,
+            TomlLibTarget, TomlBinTarget, TomlBenchTarget, TomlExampleTarget, TomlTestTarget};
+
+
+pub fn targets(manifest: &TomlManifest,
+               package_name: &str,
+               package_root: &Path,
+               custom_build: &Option<StringOrBool>,
+               warnings: &mut Vec<String>,
+               errors: &mut Vec<String>)
+               -> CargoResult<Vec<Target>> {
+    let mut targets = Vec::new();
+
+    let has_lib;
+
+    if let Some(target) = clean_lib(manifest.lib.as_ref(), package_root, package_name, warnings)? {
+        targets.push(target);
+        has_lib = true;
+    } else {
+        has_lib = false;
+    }
+
+    targets.extend(
+        clean_bins(manifest.bin.as_ref(), package_root, package_name, warnings, has_lib)?
+    );
+
+    targets.extend(
+        clean_examples(manifest.example.as_ref(), package_root, errors)?
+    );
+
+    targets.extend(
+        clean_tests(manifest.test.as_ref(), package_root, errors)?
+    );
+
+    targets.extend(
+        clean_benches(manifest.bench.as_ref(), package_root, warnings, errors)?
+    );
+
+    // processing the custom build script
+    if let Some(custom_build) = manifest.maybe_custom_build(custom_build, package_root) {
+        let name = format!("build-script-{}",
+                           custom_build.file_stem().and_then(|s| s.to_str()).unwrap_or(""));
+        targets.push(Target::custom_build_target(&name, package_root.join(custom_build)));
+    }
+
+    Ok(targets)
+}
+
+
+fn clean_lib(toml_lib: Option<&TomlLibTarget>,
+             package_root: &Path,
+             package_name: &str,
+             warnings: &mut Vec<String>) -> CargoResult<Option<Target>> {
+    let inferred = inferred_lib(package_root);
+    let lib = match toml_lib {
+        Some(lib) => {
+            if let Some(ref name) = lib.name {
+                // XXX: other code paths dodge this validation
+                if name.contains('-') {
+                    bail!("library target names cannot contain hyphens: {}", name)
+                }
+            }
+            Some(TomlTarget {
+                name: lib.name.clone().or_else(|| Some(package_name.to_owned())),
+                ..lib.clone()
+            })
+        }
+        None => inferred.as_ref().map(|lib| {
+            TomlTarget {
+                name: Some(package_name.to_string()),
+                path: Some(PathValue(lib.clone())),
+                ..TomlTarget::new()
+            }
+        })
+    };
+
+    let lib = match lib {
+        Some(ref lib) => lib,
+        None => return Ok(None)
+    };
+
+    validate_has_name(lib, "library", "lib")?;
+
+    let path = match (lib.path.as_ref(), inferred) {
+        (Some(path), _) => package_root.join(&path.0),
+        (None, Some(path)) => path,
+        (None, None) => {
+            let legacy_path = package_root.join("src").join(format!("{}.rs", lib.name()));
+            if legacy_path.exists() {
+                warnings.push(format!(
+                    "path `{}` was erroneously implicitly accepted for library `{}`,\n\
+                     please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
+                    legacy_path.display(), lib.name()
+                ));
+                legacy_path
+            } else {
+                bail!("can't find library `{}`, \
+                       rename file to `src/lib.rs` or specify lib.path", lib.name())
+            }
+        }
+    };
+
+    // Per the Macros 1.1 RFC:
+    //
+    // > Initially if a crate is compiled with the proc-macro crate type
+    // > (and possibly others) it will forbid exporting any items in the
+    // > crate other than those functions tagged #[proc_macro_derive] and
+    // > those functions must also be placed at the crate root.
+    //
+    // A plugin requires exporting plugin_registrar so a crate cannot be
+    // both at once.
+    let crate_types = match (lib.crate_types(), lib.plugin, lib.proc_macro()) {
+        (_, Some(true), Some(true)) => bail!("lib.plugin and lib.proc-macro cannot both be true"),
+        (Some(kinds), _, _) => kinds.iter().map(|s| LibKind::from_str(s)).collect(),
+        (None, Some(true), _) => vec![LibKind::Dylib],
+        (None, _, Some(true)) => vec![LibKind::ProcMacro],
+        (None, _, _) => vec![LibKind::Lib],
+    };
+
+    let mut target = Target::lib_target(&lib.name(), crate_types, path);
+    configure(lib, &mut target);
+    Ok(Some(target))
+}
+
+fn clean_bins(toml_bins: Option<&Vec<TomlBinTarget>>,
+              package_root: &Path,
+              package_name: &str,
+              warnings: &mut Vec<String>,
+              has_lib: bool) -> CargoResult<Vec<Target>> {
+    let inferred = inferred_bins(package_root, package_name);
+    let bins = match toml_bins {
+        Some(bins) => bins.clone(),
+        None => inferred.iter().map(|&(ref name, ref path)| {
+            TomlTarget {
+                name: Some(name.clone()),
+                path: Some(PathValue(path.clone())),
+                ..TomlTarget::new()
+            }
+        }).collect()
+    };
+
+    for bin in &bins {
+        validate_has_name(bin, "binary", "bin")?;
+
+        let name = bin.name();
+        if is_bad_artifact_name(&name) {
+            bail!("the binary target name `{}` is forbidden", name)
+        }
+    }
+
+    validate_unique_names(&bins, "binary")?;
+
+    let mut result = Vec::new();
+    for bin in &bins {
+        let path = target_path(bin, &inferred, "bin", package_root, &mut |_| {
+            if let Some(legacy_path) = legacy_bin_path(package_root, &bin.name(), has_lib) {
+                warnings.push(format!(
+                    "path `{}` was erroneously implicitly accepted for binary `{}`,\n\
+                     please set bin.path in Cargo.toml",
+                    legacy_path.display(), bin.name()
+                ));
+                Some(legacy_path)
+            } else {
+                None
+            }
+        });
+        let path = match path {
+            Ok(path) => path,
+            Err(e) => bail!("{}", e),
+        };
+
+        let mut target = Target::bin_target(&bin.name(), path,
+                                            bin.required_features.clone());
+        configure(bin, &mut target);
+        result.push(target);
+    }
+    return Ok(result);
+
+    fn legacy_bin_path(package_root: &Path, name: &str, has_lib: bool) -> Option<PathBuf> {
+        if !has_lib {
+            let path = package_root.join("src").join(format!("{}.rs", name));
+            if path.exists() {
+                return Some(path);
+            }
+        }
+        let path = package_root.join("src").join("main.rs");
+        if path.exists() {
+            return Some(path);
+        }
+
+        let path = package_root.join("src").join("bin").join("main.rs");
+        if path.exists() {
+            return Some(path);
+        }
+        None
+    }
+}
+
+fn clean_examples(toml_examples: Option<&Vec<TomlExampleTarget>>,
+                  package_root: &Path,
+                  errors: &mut Vec<String>)
+                  -> CargoResult<Vec<Target>> {
+
+    let inferred = infer_from_directory(&package_root.join("examples"));
+
+    let targets = clean_targets("example", "example",
+                                toml_examples, &inferred,
+                                package_root, errors)?;
+
+    let mut result = Vec::new();
+    for (path, toml) in targets {
+        let crate_types = match toml.crate_types() {
+            Some(kinds) => kinds.iter().map(|s| LibKind::from_str(s)).collect(),
+            None => Vec::new()
+        };
+
+        let mut target = Target::example_target(&toml.name(), crate_types, path,
+                                                toml.required_features.clone());
+        configure(&toml, &mut target);
+        result.push(target);
+    }
+
+    Ok(result)
+}
+
+fn clean_tests(toml_tests: Option<&Vec<TomlTestTarget>>,
+               package_root: &Path,
+               errors: &mut Vec<String>) -> CargoResult<Vec<Target>> {
+
+    let inferred = infer_from_directory(&package_root.join("tests"));
+
+    let targets = clean_targets("test", "test",
+                                toml_tests, &inferred,
+                                package_root, errors)?;
+
+    let mut result = Vec::new();
+    for (path, toml) in targets {
+        let mut target = Target::test_target(&toml.name(), path,
+                                             toml.required_features.clone());
+        configure(&toml, &mut target);
+        result.push(target);
+    }
+    Ok(result)
+}
+
+fn clean_benches(toml_benches: Option<&Vec<TomlBenchTarget>>,
+                 package_root: &Path,
+                 warnings: &mut Vec<String>,
+                 errors: &mut Vec<String>) -> CargoResult<Vec<Target>> {
+    let mut legacy_bench_path = |bench: &TomlTarget| {
+        let legacy_path = package_root.join("src").join("bench.rs");
+        if !(bench.name() == "bench" && legacy_path.exists()) {
+            return None;
+        }
+        warnings.push(format!(
+            "path `{}` was erroneously implicitly accepted for benchmark `{}`,\n\
+             please set bench.path in Cargo.toml",
+            legacy_path.display(), bench.name()
+        ));
+        Some(legacy_path)
+    };
+
+    let inferred = infer_from_directory(&package_root.join("benches"));
+
+    let targets = clean_targets_with_legacy_path("benchmark", "bench",
+                                                 toml_benches, &inferred,
+                                                 package_root,
+                                                 errors,
+                                                 &mut legacy_bench_path)?;
+
+    let mut result = Vec::new();
+    for (path, toml) in targets {
+        let mut target = Target::bench_target(&toml.name(), path,
+                                              toml.required_features.clone());
+        configure(&toml, &mut target);
+        result.push(target);
+    }
+
+    Ok(result)
+}
+
+fn clean_targets(target_kind_human: &str, target_kind: &str,
+                 toml_targets: Option<&Vec<TomlTarget>>,
+                 inferred: &[(String, PathBuf)],
+                 package_root: &Path,
+                 errors: &mut Vec<String>)
+                 -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
+    clean_targets_with_legacy_path(target_kind_human, target_kind,
+                                   toml_targets,
+                                   inferred,
+                                   package_root,
+                                   errors,
+                                   &mut |_| None)
+}
+
+fn clean_targets_with_legacy_path(target_kind_human: &str, target_kind: &str,
+                                  toml_targets: Option<&Vec<TomlTarget>>,
+                                  inferred: &[(String, PathBuf)],
+                                  package_root: &Path,
+                                  errors: &mut Vec<String>,
+                                  legacy_path: &mut FnMut(&TomlTarget) -> Option<PathBuf>)
+                                  -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
+    let toml_targets = match toml_targets {
+        Some(targets) => targets.clone(),
+        None => inferred.iter().map(|&(ref name, ref path)| {
+            TomlTarget {
+                name: Some(name.clone()),
+                path: Some(PathValue(path.clone())),
+                ..TomlTarget::new()
+            }
+        }).collect()
+    };
+
+    for target in &toml_targets {
+        validate_has_name(target, target_kind_human, target_kind)?;
+    }
+
+    validate_unique_names(&toml_targets, target_kind)?;
+    let mut result = Vec::new();
+    for target in toml_targets {
+        let path = target_path(&target, inferred, target_kind, package_root, legacy_path);
+        let path = match path {
+            Ok(path) => path,
+            Err(e) => {
+                errors.push(e);
+                continue
+            },
+        };
+        result.push((path, target));
+    }
+    Ok(result)
+}
+
+
+fn inferred_lib(package_root: &Path) -> Option<PathBuf> {
+    let lib = package_root.join("src").join("lib.rs");
+    if fs::metadata(&lib).is_ok() {
+        Some(lib)
+    } else {
+        None
+    }
+}
+
+fn inferred_bins(package_root: &Path, package_name: &str) -> Vec<(String, PathBuf)> {
+    let main = package_root.join("src").join("main.rs");
+    let mut result = Vec::new();
+    if main.exists() {
+        result.push((package_name.to_string(), main));
+    }
+    result.extend(infer_from_directory(&package_root.join("src").join("bin")));
+
+    result
+}
+
+fn infer_from_directory(directory: &Path) -> Vec<(String, PathBuf)> {
+    let entries = match fs::read_dir(directory) {
+        Err(_) => return Vec::new(),
+        Ok(dir) => dir
+    };
+
+    entries
+        .filter_map(|e| e.ok())
+        .filter(is_not_dotfile)
+        .filter_map(|d| infer_any(&d))
+        .collect()
+}
+
+
+fn infer_any(entry: &DirEntry) -> Option<(String, PathBuf)> {
+    if entry.path().extension().and_then(|p| p.to_str()) == Some("rs") {
+        infer_file(entry)
+    } else if entry.file_type().map(|t| t.is_dir()).ok() == Some(true) {
+        infer_subdirectory(entry)
+    } else {
+        None
+    }
+}
+
+
+fn infer_file(entry: &DirEntry) -> Option<(String, PathBuf)> {
+    let path = entry.path();
+    path
+        .file_stem()
+        .and_then(|p| p.to_str())
+        .map(|p| (p.to_owned(), path.clone()))
+}
+
+
+fn infer_subdirectory(entry: &DirEntry) -> Option<(String, PathBuf)> {
+    let path = entry.path();
+    let main = path.join("main.rs");
+    let name = path.file_name().and_then(|n| n.to_str());
+    match (name, main.exists()) {
+        (Some(name), true) => Some((name.to_owned(), main)),
+        _ => None
+    }
+}
+
+
+fn is_not_dotfile(entry: &DirEntry) -> bool {
+    entry.file_name().to_str().map(|s| s.starts_with('.')) == Some(false)
+}
+
+
+fn validate_has_name(target: &TomlTarget,
+                     target_kind_human: &str,
+                     target_kind: &str) -> CargoResult<()> {
+    match target.name {
+        Some(ref name) => if name.trim().is_empty() {
+            bail!("{} target names cannot be empty", target_kind_human)
+        },
+        None => bail!("{} target {}.name is required", target_kind_human, target_kind)
+    }
+
+    Ok(())
+}
+
+/// Will check a list of toml targets, and make sure the target names are unique within a vector.
+fn validate_unique_names(targets: &[TomlTarget], target_kind: &str) -> CargoResult<()> {
+    let mut seen = HashSet::new();
+    for name in targets.iter().map(|e| e.name()) {
+        if !seen.insert(name.clone()) {
+            bail!("found duplicate {target_kind} name {name}, \
+                   but all {target_kind} targets must have a unique name",
+                  target_kind = target_kind, name = name);
+        }
+    }
+    Ok(())
+}
+
+
+fn configure(toml: &TomlTarget, target: &mut Target) {
+    let t2 = target.clone();
+    target.set_tested(toml.test.unwrap_or_else(|| t2.tested()))
+        .set_doc(toml.doc.unwrap_or_else(|| t2.documented()))
+        .set_doctest(toml.doctest.unwrap_or_else(|| t2.doctested()))
+        .set_benched(toml.bench.unwrap_or_else(|| t2.benched()))
+        .set_harness(toml.harness.unwrap_or_else(|| t2.harness()))
+        .set_for_host(match (toml.plugin, toml.proc_macro()) {
+            (None, None) => t2.for_host(),
+            (Some(true), _) | (_, Some(true)) => true,
+            (Some(false), _) | (_, Some(false)) => false,
+        });
+}
+
+fn target_path(target: &TomlTarget,
+               inferred: &[(String, PathBuf)],
+               target_kind: &str,
+               package_root: &Path,
+               legacy_path: &mut FnMut(&TomlTarget) -> Option<PathBuf>) -> Result<PathBuf, String> {
+    if let Some(ref path) = target.path {
+        // Should we verify that this path exists here?
+        return Ok(package_root.join(&path.0));
+    }
+    let name = target.name();
+
+    let mut matching = inferred.iter()
+        .filter(|&&(ref n, _)| n == &name)
+        .map(|&(_, ref p)| p.clone());
+
+    let first = matching.next();
+    let second = matching.next();
+    match (first, second) {
+        (Some(path), None) => Ok(path),
+        (None, None) | (Some(_), Some(_)) => {
+            if let Some(path) = legacy_path(target) {
+                return Ok(path);
+            }
+            Err(format!("can't find `{name}` {target_kind}, specify {target_kind}.path",
+                        name = name, target_kind = target_kind))
+        }
+        (None, Some(_)) => unreachable!()
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/cargo/util/vcs.rs b/collector/compile-benchmarks/cargo/src/cargo/util/vcs.rs
new file mode 100644
index 000000000..1d3188e26
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/cargo/util/vcs.rs
@@ -0,0 +1,66 @@
+use std::path::Path;
+use std::fs::create_dir;
+
+use git2;
+
+use util::{CargoResult, process};
+
+pub struct HgRepo;
+pub struct GitRepo;
+pub struct PijulRepo;
+pub struct FossilRepo;
+
+impl GitRepo {
+    pub fn init(path: &Path, _: &Path) -> CargoResult<GitRepo> {
+        git2::Repository::init(path)?;
+        Ok(GitRepo)
+    }
+    pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository, git2::Error> {
+        git2::Repository::discover(path)
+    }
+}
+
+impl HgRepo {
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
+        process("hg").cwd(cwd).arg("init").arg(path).exec()?;
+        Ok(HgRepo)
+    }
+    pub fn discover(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
+        process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output()?;
+        Ok(HgRepo)
+    }
+}
+
+impl PijulRepo {
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<PijulRepo> {
+        process("pijul").cwd(cwd).arg("init").arg(path).exec()?;
+        Ok(PijulRepo)
+    }
+}
+
+impl FossilRepo {
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<FossilRepo> {
+        // fossil doesn't create the directory so we'll do that first
+        create_dir(path)?;
+
+        // set up the paths we'll use
+        let db_fname = ".fossil";
+        let mut db_path = path.to_owned();
+        db_path.push(db_fname);
+
+        // then create the fossil DB in that location
+        process("fossil").cwd(cwd).arg("init").arg(&db_path).exec()?;
+
+        // open it in that new directory
+        process("fossil").cwd(&path).arg("open").arg(db_fname).exec()?;
+
+        // set `target` as ignoreable and cleanable
+        process("fossil").cwd(cwd).arg("settings")
+            .arg("ignore-glob")
+            .arg("target");
+
+        process("fossil").cwd(cwd).arg("settings")
+            .arg("clean-glob")
+            .arg("target");
+        Ok(FossilRepo)
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/src/ci/dox.sh b/collector/compile-benchmarks/cargo/src/ci/dox.sh
new file mode 100644
index 000000000..7dffde265
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/ci/dox.sh
@@ -0,0 +1,33 @@
+set -ex
+
+DOCS="index faq config guide manifest build-script pkgid-spec crates-io \
+  environment-variables specifying-dependencies source-replacement \
+  external-tools"
+ASSETS="CNAME images/noise.png images/forkme.png images/Cargo-Logo-Small.png \
+  stylesheets/all.css stylesheets/normalize.css javascripts/prism.js \
+  javascripts/all.js stylesheets/prism.css images/circle-with-i.png \
+  images/search.png images/org-level-acl.png images/auth-level-acl.png \
+  favicon.ico policies.html"
+
+for asset in $ASSETS; do
+  mkdir -p `dirname target/doc/$asset`
+  cp src/doc/$asset target/doc/$asset
+done
+
+for doc in $DOCS; do
+  rustdoc \
+    --markdown-no-toc \
+    --markdown-css stylesheets/normalize.css \
+    --markdown-css stylesheets/all.css \
+    --markdown-css stylesheets/prism.css \
+    --html-in-header src/doc/html-headers.html \
+    --html-before-content src/doc/header.html \
+    --html-after-content src/doc/footer.html \
+    -o target/doc \
+    src/doc/$doc.md
+done
+
+# Temporary preview for mdBook docs
+cd src/doc/book
+$HOME/.cargo/bin/mdbook build --no-create --dest-dir ../../../target/doc/book
+cd ../../../
diff --git a/collector/compile-benchmarks/cargo/src/crates-io/Cargo.toml b/collector/compile-benchmarks/cargo/src/crates-io/Cargo.toml
new file mode 100644
index 000000000..25a07376d
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/crates-io/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "crates-io"
+version = "0.12.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = """
+Helpers for interacting with crates.io
+"""
+
+[lib]
+name = "crates_io"
+path = "lib.rs"
+
+[dependencies]
+curl = "0.4"
+error-chain = "0.11.0-rc.2"
+serde = "1.0"
+serde_derive = "1.0"
+serde_json = "1.0"
+url = "1.0"
diff --git a/collector/compile-benchmarks/cargo/src/crates-io/lib.rs b/collector/compile-benchmarks/cargo/src/crates-io/lib.rs
new file mode 100644
index 000000000..910d51a37
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/crates-io/lib.rs
@@ -0,0 +1,343 @@
+#![allow(unknown_lints)]
+
+extern crate curl;
+extern crate url;
+#[macro_use]
+extern crate error_chain;
+extern crate serde_json;
+#[macro_use]
+extern crate serde_derive;
+
+use std::collections::BTreeMap;
+use std::fs::File;
+use std::io::prelude::*;
+use std::io::{self, Cursor};
+
+use curl::easy::{Easy, List};
+
+use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
+
+error_chain! {
+    foreign_links {
+        Curl(curl::Error);
+        Io(io::Error);
+        Json(serde_json::Error);
+    }
+
+    errors {
+        NotOkResponse(code: u32, headers: Vec<String>, body: Vec<u8>) {
+            description("failed to get a 200 OK response")
+            display("failed to get a 200 OK response, got {}
+headers:
+    {}
+body:
+{}", code, headers.join("\n    "), String::from_utf8_lossy(body))
+        }
+        NonUtf8Body {
+            description("response body was not utf-8")
+            display("response body was not utf-8")
+        }
+        Api(errs: Vec<String>) {
+            display("api errors: {}", errs.join(", "))
+        }
+        Unauthorized {
+            display("unauthorized API access")
+        }
+        TokenMissing {
+            display("no upload token found, please run `cargo login`")
+        }
+        NotFound {
+            display("cannot find crate")
+        }
+    }
+}
+
+pub struct Registry {
+    host: String,
+    token: Option<String>,
+    handle: Easy,
+}
+
+#[derive(PartialEq, Clone, Copy)]
+pub enum Auth {
+    Authorized,
+    Unauthorized,
+}
+
+#[derive(Deserialize)]
+pub struct Crate {
+    pub name: String,
+    pub description: Option<String>,
+    pub max_version: String,
+}
+
+#[derive(Serialize)]
+pub struct NewCrate {
+    pub name: String,
+    pub vers: String,
+    pub deps: Vec<NewCrateDependency>,
+    pub features: BTreeMap<String, Vec<String>>,
+    pub authors: Vec<String>,
+    pub description: Option<String>,
+    pub documentation: Option<String>,
+    pub homepage: Option<String>,
+    pub readme: Option<String>,
+    pub keywords: Vec<String>,
+    pub categories: Vec<String>,
+    pub license: Option<String>,
+    pub license_file: Option<String>,
+    pub repository: Option<String>,
+    pub badges: BTreeMap<String, BTreeMap<String, String>>,
+}
+
+#[derive(Serialize)]
+pub struct NewCrateDependency {
+    pub optional: bool,
+    pub default_features: bool,
+    pub name: String,
+    pub features: Vec<String>,
+    pub version_req: String,
+    pub target: Option<String>,
+    pub kind: String,
+}
+
+#[derive(Deserialize)]
+pub struct User {
+    pub id: u32,
+    pub login: String,
+    pub avatar: Option<String>,
+    pub email: Option<String>,
+    pub name: Option<String>,
+}
+
+pub struct Warnings {
+    pub invalid_categories: Vec<String>,
+    pub invalid_badges: Vec<String>,
+}
+
+#[derive(Deserialize)] struct R { ok: bool }
+#[derive(Deserialize)] struct OwnerResponse { ok: bool, msg: String }
+#[derive(Deserialize)] struct ApiErrorList { errors: Vec<ApiError> }
+#[derive(Deserialize)] struct ApiError { detail: String }
+#[derive(Serialize)] struct OwnersReq<'a> { users: &'a [&'a str] }
+#[derive(Deserialize)] struct Users { users: Vec<User> }
+#[derive(Deserialize)] struct TotalCrates { total: u32 }
+#[derive(Deserialize)] struct Crates { crates: Vec<Crate>, meta: TotalCrates }
+impl Registry {
+    pub fn new(host: String, token: Option<String>) -> Registry {
+        Registry::new_handle(host, token, Easy::new())
+    }
+
+    pub fn new_handle(host: String,
+                      token: Option<String>,
+                      handle: Easy) -> Registry {
+        Registry {
+            host: host,
+            token: token,
+            handle: handle,
+        }
+    }
+
+    pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<String> {
+        let body = serde_json::to_string(&OwnersReq { users: owners })?;
+        let body = self.put(format!("/crates/{}/owners", krate),
+                            body.as_bytes())?;
+        assert!(serde_json::from_str::<R>(&body)?.ok);
+        Ok(serde_json::from_str::<OwnerResponse>(&body)?.msg)
+    }
+
+    pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
+        let body = serde_json::to_string(&OwnersReq { users: owners })?;
+        let body = self.delete(format!("/crates/{}/owners", krate),
+                               Some(body.as_bytes()))?;
+        assert!(serde_json::from_str::<R>(&body)?.ok);
+        Ok(())
+    }
+
+    pub fn list_owners(&mut self, krate: &str) -> Result<Vec<User>> {
+        let body = self.get(format!("/crates/{}/owners", krate))?;
+        Ok(serde_json::from_str::<Users>(&body)?.users)
+    }
+
+    pub fn publish(&mut self, krate: &NewCrate, tarball: &File)
+                   -> Result<Warnings> {
+        let json = serde_json::to_string(krate)?;
+        // Prepare the body. The format of the upload request is:
+        //
+        //      <le u32 of json length>
+        //      <json request> (metadata for the package)
+        //      <le u32 of tarball length>
+        //      <source tarball>
+        let stat = tarball.metadata()?;
+        let header = {
+            let mut w = Vec::new();
+            w.extend([
+                (json.len() >>  0) as u8,
+                (json.len() >>  8) as u8,
+                (json.len() >> 16) as u8,
+                (json.len() >> 24) as u8,
+            ].iter().map(|x| *x));
+            w.extend(json.as_bytes().iter().map(|x| *x));
+            w.extend([
+                (stat.len() >>  0) as u8,
+                (stat.len() >>  8) as u8,
+                (stat.len() >> 16) as u8,
+                (stat.len() >> 24) as u8,
+            ].iter().map(|x| *x));
+            w
+        };
+        let size = stat.len() as usize + header.len();
+        let mut body = Cursor::new(header).chain(tarball);
+
+        let url = format!("{}/api/v1/crates/new", self.host);
+
+        let token = match self.token.as_ref() {
+            Some(s) => s,
+            None => return Err(Error::from_kind(ErrorKind::TokenMissing)),
+        };
+        self.handle.put(true)?;
+        self.handle.url(&url)?;
+        self.handle.in_filesize(size as u64)?;
+        let mut headers = List::new();
+        headers.append("Accept: application/json")?;
+        headers.append(&format!("Authorization: {}", token))?;
+        self.handle.http_headers(headers)?;
+
+        let body = handle(&mut self.handle, &mut |buf| body.read(buf).unwrap_or(0))?;
+
+        let response = if body.len() > 0 {
+            body.parse::<serde_json::Value>()?
+        } else {
+            "{}".parse()?
+        };
+
+        let invalid_categories: Vec<String> = response
+            .get("warnings")
+            .and_then(|j| j.get("invalid_categories"))
+            .and_then(|j| j.as_array())
+            .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+            .unwrap_or_else(Vec::new);
+
+        let invalid_badges: Vec<String> = response
+            .get("warnings")
+            .and_then(|j| j.get("invalid_badges"))
+            .and_then(|j| j.as_array())
+            .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+            .unwrap_or_else(Vec::new);
+
+        Ok(Warnings {
+            invalid_categories: invalid_categories,
+            invalid_badges: invalid_badges,
+        })
+    }
+
+    pub fn search(&mut self, query: &str, limit: u8) -> Result<(Vec<Crate>, u32)> {
+        let formated_query = percent_encode(query.as_bytes(), QUERY_ENCODE_SET);
+        let body = self.req(
+            format!("/crates?q={}&per_page={}", formated_query, limit),
+            None, Auth::Unauthorized
+        )?;
+
+        let crates = serde_json::from_str::<Crates>(&body)?;
+        Ok((crates.crates, crates.meta.total))
+    }
+
+    pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
+        let body = self.delete(format!("/crates/{}/{}/yank", krate, version),
+                               None)?;
+        assert!(serde_json::from_str::<R>(&body)?.ok);
+        Ok(())
+    }
+
+    pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
+        let body = self.put(format!("/crates/{}/{}/unyank", krate, version),
+                            &[])?;
+        assert!(serde_json::from_str::<R>(&body)?.ok);
+        Ok(())
+    }
+
+    fn put(&mut self, path: String, b: &[u8]) -> Result<String> {
+        self.handle.put(true)?;
+        self.req(path, Some(b), Auth::Authorized)
+    }
+
+    fn get(&mut self, path: String) -> Result<String> {
+        self.handle.get(true)?;
+        self.req(path, None, Auth::Authorized)
+    }
+
+    fn delete(&mut self, path: String, b: Option<&[u8]>) -> Result<String> {
+        self.handle.custom_request("DELETE")?;
+        self.req(path, b, Auth::Authorized)
+    }
+
+    fn req(&mut self,
+           path: String,
+           body: Option<&[u8]>,
+           authorized: Auth) -> Result<String> {
+        self.handle.url(&format!("{}/api/v1{}", self.host, path))?;
+        let mut headers = List::new();
+        headers.append("Accept: application/json")?;
+        headers.append("Content-Type: application/json")?;
+
+        if authorized == Auth::Authorized {
+            let token = match self.token.as_ref() {
+                Some(s) => s,
+                None => return Err(Error::from_kind(ErrorKind::TokenMissing)),
+            };
+            headers.append(&format!("Authorization: {}", token))?;
+        }
+        self.handle.http_headers(headers)?;
+        match body {
+            Some(mut body) => {
+                self.handle.upload(true)?;
+                self.handle.in_filesize(body.len() as u64)?;
+                handle(&mut self.handle, &mut |buf| body.read(buf).unwrap_or(0))
+            }
+            None => handle(&mut self.handle, &mut |_| 0),
+        }
+    }
+}
+
+fn handle(handle: &mut Easy,
+          read: &mut FnMut(&mut [u8]) -> usize) -> Result<String> {
+    let mut headers = Vec::new();
+    let mut body = Vec::new();
+    {
+        let mut handle = handle.transfer();
+        handle.read_function(|buf| Ok(read(buf)))?;
+        handle.write_function(|data| {
+            body.extend_from_slice(data);
+            Ok(data.len())
+        })?;
+        handle.header_function(|data| {
+            headers.push(String::from_utf8_lossy(data).into_owned());
+            true
+        })?;
+        handle.perform()?;
+    }
+
+    match handle.response_code()? {
+        0 => {} // file upload url sometimes
+        200 => {}
+        403 => return Err(Error::from_kind(ErrorKind::Unauthorized)),
+        404 => return Err(Error::from_kind(ErrorKind::NotFound)),
+        code => return Err(Error::from_kind(ErrorKind::NotOkResponse(code, headers, body))),
+    }
+
+    let body = match String::from_utf8(body) {
+        Ok(body) => body,
+        Err(..) => return Err(Error::from_kind(ErrorKind::NonUtf8Body)),
+    };
+    match serde_json::from_str::<ApiErrorList>(&body) {
+        Ok(errors) => {
+            return Err(Error::from_kind(ErrorKind::Api(errors
+                .errors
+                .into_iter()
+                .map(|s| s.detail)
+                .collect())))
+        }
+        Err(..) => {}
+    }
+    Ok(body)
+}
diff --git a/collector/compile-benchmarks/cargo/src/doc/CNAME b/collector/compile-benchmarks/cargo/src/doc/CNAME
new file mode 100644
index 000000000..b68cc5511
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/CNAME
@@ -0,0 +1 @@
+doc.crates.io
diff --git a/collector/compile-benchmarks/cargo/src/doc/MIGRATION_MAP b/collector/compile-benchmarks/cargo/src/doc/MIGRATION_MAP
new file mode 100644
index 000000000..433a7851f
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/MIGRATION_MAP
@@ -0,0 +1,12 @@
+index.md book/src/index.md book/src/getting-started/index.md book/src/getting-started/*.md
+guide.md book/src/guide/index.md book/src/guide/*.md
+build-script.md book/src/reference/build-scripts.md
+config.md book/src/reference/config.md
+crates-io.md book/src/reference/publishing.md
+environment-variables.md book/src/reference/environment-variables.md
+external-tools.md book/src/reference/external-tools.md
+manifest.md book/src/reference/manifest.md
+pkgid-spec.md book/src/reference/pkgid-spec.md
+source-replacement.md book/src/reference/source-replacement.md
+specifying-dependencies.md book/src/reference/specifying-dependencies.md
+faq.md book/src/faq.md
diff --git a/collector/compile-benchmarks/cargo/src/doc/README.md b/collector/compile-benchmarks/cargo/src/doc/README.md
new file mode 100644
index 000000000..e68b5ca60
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/README.md
@@ -0,0 +1,6 @@
+# Cargo Documentation
+
+NOTE: Cargo documentation is under migration to an mdBook-based structure. All
+the `*.md` files here shall be kept in sync with the `*.md` files under
+`book/src/`. See the `MIGRATION_MAP` file here for details.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/.gitignore b/collector/compile-benchmarks/cargo/src/doc/book/.gitignore
new file mode 100644
index 000000000..5a0bf0317
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/.gitignore
@@ -0,0 +1 @@
+/book
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/README.md b/collector/compile-benchmarks/cargo/src/doc/book/README.md
new file mode 100644
index 000000000..b24da689e
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/README.md
@@ -0,0 +1,47 @@
+# The Cargo Book
+
+
+### Requirements
+
+Building the book requires [mdBook]. To get it:
+
+[mdBook]: https://github.com/azerupi/mdBook
+
+```shell
+$ cargo install mdbook
+```
+
+### Building
+
+To build the book:
+
+```shell
+$ mdbook build
+```
+
+The output will be in the `book` subdirectory. To check it out, open it in
+your web browser.
+
+_Firefox:_
+```shell
+$ firefox book/index.html                       # Linux
+$ open -a "Firefox" book/index.html             # OS X
+$ Start-Process "firefox.exe" .\book\index.html # Windows (PowerShell)
+$ start firefox.exe .\book\index.html           # Windows (Cmd)
+```
+
+_Chrome:_
+```shell
+$ google-chrome book/index.html                 # Linux
+$ open -a "Google Chrome" book/index.html       # OS X
+$ Start-Process "chrome.exe" .\book\index.html  # Windows (PowerShell)
+$ start chrome.exe .\book\index.html            # Windows (Cmd)
+```
+
+
+## Contributing
+
+Given that the book is still in a draft state, we'd love your help!
Please feel free to open +issues about anything, and send in PRs for things you'd like to fix or change. If your change is +large, please open an issue first, so we can make sure that it's something we'd accept before you +go through the work of getting a PR together. diff --git a/collector/compile-benchmarks/cargo/src/doc/book/book.toml b/collector/compile-benchmarks/cargo/src/doc/book/book.toml new file mode 100644 index 000000000..1b84b2978 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/book.toml @@ -0,0 +1,2 @@ +title = "The Cargo Manual" +author = "Alex Crichton, Steve Klabnik and Carol Nichols, with Contributions from the Rust Community" diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/SUMMARY.md b/collector/compile-benchmarks/cargo/src/doc/book/src/SUMMARY.md new file mode 100644 index 000000000..5f46bfade --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/SUMMARY.md @@ -0,0 +1,30 @@ +# Summary + +[Introduction](index.md) + +* [Getting Started](getting-started/index.md) + * [Installation](getting-started/installation.md) + * [First Steps with Cargo](getting-started/first-steps.md) + +* [Cargo Guide](guide/index.md) + * [Why Cargo Exists](guide/why-cargo-exists.md) + * [Creating a New Project](guide/creating-a-new-project.md) + * [Working on an Existing Project](guide/working-on-an-existing-project.md) + * [Dependencies](guide/dependencies.md) + * [Project Layout](guide/project-layout.md) + * [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.md) + * [Tests](guide/tests.md) + * [Continuous Integration](guide/continuous-integration.md) + +* [Cargo Reference](reference/index.md) + * [Specifying Dependencies](reference/specifying-dependencies.md) + * [The Manifest Format](reference/manifest.md) + * [Configuration](reference/config.md) + * [Environment Variables](reference/environment-variables.md) + * [Build Scripts](reference/build-scripts.md) + * [Publishing on crates.io](reference/publishing.md) + * [Package ID Specifications](reference/pkgid-spec.md) + * [Source Replacement](reference/source-replacement.md) + * [External Tools](reference/external-tools.md) + +* [FAQ](faq.md) diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/faq.md b/collector/compile-benchmarks/cargo/src/doc/book/src/faq.md new file mode 100644 index 000000000..7f13573fd --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/faq.md @@ -0,0 +1,193 @@ +## Frequently Asked Questions + +### Is the plan to use GitHub as a package repository? + +No. The plan for Cargo is to use [crates.io], like npm or Rubygems do with +npmjs.org and rubygems.org. + +We plan to support git repositories as a source of packages forever, +because they can be used for early development and temporary patches, +even when people use the registry as the primary source of packages. + +### Why build crates.io rather than use GitHub as a registry? + +We think that it’s very important to support multiple ways to download +packages, including downloading from GitHub and copying packages into +your project itself. + +That said, we think that [crates.io] offers a number of important benefits, and +will likely become the primary way that people download packages in Cargo. + +For precedent, both Node.js’s [npm][1] and Ruby’s [bundler][2] support both a +central registry model as well as a Git-based model, and most packages +are downloaded through the registry in those ecosystems, with an +important minority of packages making use of git-based packages. 
+ +[1]: https://www.npmjs.org +[2]: https://bundler.io + +Some of the advantages that make a central registry popular in other +languages include: + +* **Discoverability**. A central registry provides an easy place to look + for existing packages. Combined with tagging, this also makes it + possible for a registry to provide ecosystem-wide information, such as a + list of the most popular or most-depended-on packages. +* **Speed**. A central registry makes it possible to easily fetch just + the metadata for packages quickly and efficiently, and then to + efficiently download just the published package, and not other bloat + that happens to exist in the repository. This adds up to a significant + improvement in the speed of dependency resolution and fetching. As + dependency graphs scale up, downloading all of the git repositories bogs + down fast. Also remember that not everybody has a high-speed, + low-latency Internet connection. + +### Will Cargo work with C code (or other languages)? + +Yes! + +Cargo handles compiling Rust code, but we know that many Rust projects +link against C code. We also know that there are decades of tooling +built up around compiling languages other than Rust. + +Our solution: Cargo allows a package to [specify a script](reference/build-scripts.html) +(written in Rust) to run before invoking `rustc`. Rust is leveraged to +implement platform-specific configuration and refactor out common build +functionality among packages. + +### Can Cargo be used inside of `make` (or `ninja`, or ...) + +Indeed. While we intend Cargo to be useful as a standalone way to +compile Rust projects at the top-level, we know that some people will +want to invoke Cargo from other build tools. + +We have designed Cargo to work well in those contexts, paying attention +to things like error codes and machine-readable output modes. We still +have some work to do on those fronts, but using Cargo in the context of +conventional scripts is something we designed for from the beginning and +will continue to prioritize. + +### Does Cargo handle multi-platform projects or cross-compilation? + +Rust itself provides facilities for configuring sections of code based +on the platform. Cargo also supports [platform-specific +dependencies][target-deps], and we plan to support more per-platform +configuration in `Cargo.toml` in the future. + +[target-deps]: reference/manifest.html#the-dependencies-section + +In the longer-term, we’re looking at ways to conveniently cross-compile +projects using Cargo. + +### Does Cargo support environments, like `production` or `test`? + +We support environments through the use of [profiles][profile] to support: + +[profile]: reference/manifest.html#the-profile-sections + +* environment-specific flags (like `-g --opt-level=0` for development + and `--opt-level=3` for production). +* environment-specific dependencies (like `hamcrest` for test assertions). +* environment-specific `#[cfg]` +* a `cargo test` command + +### Does Cargo work on Windows? + +Yes! + +All commits to Cargo are required to pass the local test suite on Windows. +If, however, you find a Windows issue, we consider it a bug, so [please file an +issue][3]. + +[3]: https://github.com/rust-lang/cargo/issues + +### Why do binaries have `Cargo.lock` in version control, but not libraries? + +The purpose of a `Cargo.lock` is to describe the state of the world at the time +of a successful build. 
+It is then used to provide deterministic builds across
+whatever machine is building the project by ensuring that the exact same
+dependencies are being compiled.
+
+This property is most desirable for applications and projects which are at the
+very end of the dependency chain (binaries). As a result, it is recommended that
+all binaries check in their `Cargo.lock`.
+
+For libraries the situation is somewhat different. A library is not only used by
+the library developers, but also by any downstream consumers of the library. Users
+dependent on the library will not inspect the library’s `Cargo.lock` (even if it
+exists). This is precisely because a library should **not** be deterministically
+recompiled for all users of the library.
+
+If a library ends up being used transitively by several dependencies, it’s
+likely that just a single copy of the library is desired (based on semver
+compatibility). If all libraries were to check in their `Cargo.lock`, then
+multiple copies of the library would be used, and perhaps even a version
+conflict would arise.
+
+In other words, libraries specify semver requirements for their dependencies but
+cannot see the full picture. Only end products like binaries have a full
+picture to decide what versions of dependencies should be used.
+
+### Can libraries use `*` as a version for their dependencies?
+
+**As of January 22nd, 2016, [crates.io] rejects all packages (not just libraries)
+with wildcard dependency constraints.**
+
+While libraries _can_, strictly speaking, they should not. A version requirement
+of `*` says “This will work with every version ever,” which is never going
+to be true. Libraries should always specify the range that they do work with,
+even if it’s something as general as “every 1.x.y version.”
+
+### Why `Cargo.toml`?
+
+Because the manifest is one of the most frequent interactions with Cargo, the
+question of why it is named `Cargo.toml` arises from time to time. The leading
+capital-`C` was chosen to ensure that the manifest was grouped with other
+similar configuration files in directory listings. Sorting files often puts
+capital letters before lowercase letters, ensuring files like `Makefile` and
+`Cargo.toml` are placed together. The trailing `.toml` was chosen to emphasize
+the fact that the file is in the [TOML configuration
+format](https://github.com/toml-lang/toml).
+
+Cargo does not allow other names such as `cargo.toml` or `Cargofile`, to
+emphasize how easily a Cargo repository can be identified. Allowing many
+possible names has historically led to confusion where one case was handled
+but others were accidentally forgotten.
+
+[crates.io]: https://crates.io/
+
+### How can Cargo work offline?
+
+Cargo is often used in situations with limited or no network access, such as
+on airplanes, in CI environments, or embedded in large production deployments.
+Users are often surprised when Cargo attempts to fetch resources from the
+network, and hence the request for Cargo to work offline comes up frequently.
+
+Cargo, at its heart, will not attempt to access the network unless told to do
+so. That is, if no crates come from crates.io, a git repository, or some other
+network location, Cargo will never attempt to make a network connection. As a
+result, if Cargo attempts to touch the network, then it's because it needs to
+fetch a required resource.
+
+Cargo is also quite aggressive about caching information to minimize the amount
+of network activity.
+It guarantees, for example, that if `cargo build` (or
+an equivalent) is run to completion, then the next `cargo build` will not
+touch the network so long as `Cargo.toml` has not been modified in the
+meantime. This avoidance of the network boils down to a `Cargo.lock` existing
+and a populated cache of the crates reflected in the lock file. If either of
+these components is missing, then it is required for the build to succeed and
+must be fetched remotely.
+
+As of Rust 1.11.0, Cargo understands a new flag, `--frozen`, which asserts
+that it shouldn't touch the network. When passed, Cargo will
+immediately return an error if it would otherwise attempt a network request.
+The error includes contextual information about why the network request is
+being made in the first place, to help with debugging. Note that this flag *does
+not change the behavior of Cargo*; it simply asserts that Cargo shouldn't touch
+the network because a previous command has already ensured that network activity
+shouldn't be necessary.
+
+For more information about vendoring, see documentation on [source
+replacement][replace].
+
+[replace]: reference/source-replacement.html
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/first-steps.md b/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/first-steps.md
new file mode 100644
index 000000000..190f69f55
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/first-steps.md
@@ -0,0 +1,70 @@
+## First Steps with Cargo
+
+To start a new project with Cargo, use `cargo new`:
+
+```shell
+$ cargo new hello_world --bin
+```
+
+We’re passing `--bin` because we’re making a binary program: if we
+were making a library, we’d leave it off.
+
+Let’s check out what Cargo has generated for us:
+
+```shell
+$ cd hello_world
+$ tree .
+.
+├── Cargo.toml
+└── src
+    └── main.rs
+
+1 directory, 2 files
+```
+
+This is all we need to get started. First, let’s check out `Cargo.toml`:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+```
+
+This is called a **manifest**, and it contains all of the metadata that Cargo
+needs to compile your project.
+
+Here’s what’s in `src/main.rs`:
+
+```rust
+fn main() {
+    println!("Hello, world!");
+}
+```
+
+Cargo generated a “hello world” for us. Let’s compile it:
+
+```shell
+$ cargo build
+   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
+```
+
+And then run it:
+
+```shell
+$ ./target/debug/hello_world
+Hello, world!
+```
+
+We can also use `cargo run` to compile and then run it, all in one step:
+
+```shell
+$ cargo run
+     Fresh hello_world v0.1.0 (file:///path/to/project/hello_world)
+   Running `target/hello_world`
+Hello, world!
+```
+
+### Going further
+
+For more details on using Cargo, check out the [Cargo Guide](guide/index.html).
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/index.md b/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/index.md
new file mode 100644
index 000000000..22a7315cf
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/index.md
@@ -0,0 +1,6 @@
+## Getting Started
+
+To get started with Cargo, install Cargo (and Rust) and set up your first crate.
+ +* [Installation](getting-started/installation.html) +* [First steps with Cargo](getting-started/first-steps.html) diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/installation.md b/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/installation.md new file mode 100644 index 000000000..8428a9063 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/getting-started/installation.md @@ -0,0 +1,38 @@ +## Installation + +### Install Stable Rust and Cargo + +The easiest way to get Cargo is to get the current stable release of [Rust] by +using the `rustup` script: + +```shell +$ curl -sSf https://static.rust-lang.org/rustup.sh | sh +``` + +After this, you can use the `rustup` command to also install `beta` or `nightly` +channels for Rust and Cargo. + +### Install Nightly Cargo + +To install just Cargo, the current recommended installation method is through +the official nightly builds. Note that Cargo will also require that [Rust] is +already installed on the system. + +| Platform | 64-bit | 32-bit | +|------------------|-------------------|-------------------| +| Linux binaries | [tar.gz][linux64] | [tar.gz][linux32] | +| MacOS binaries | [tar.gz][mac64] | [tar.gz][mac32] | +| Windows binaries | [tar.gz][win64] | [tar.gz][win32] | + +### Build and Install Cargo from Source + +Alternatively, you can [build Cargo from source][compiling-from-source]. + +[rust]: https://www.rust-lang.org/ +[linux64]: https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-unknown-linux-gnu.tar.gz +[linux32]: https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-unknown-linux-gnu.tar.gz +[mac64]: https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-apple-darwin.tar.gz +[mac32]: https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-apple-darwin.tar.gz +[win64]: https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-pc-windows-gnu.tar.gz +[win32]: https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-pc-windows-gnu.tar.gz +[compiling-from-source]: https://github.com/rust-lang/cargo#compiling-from-source diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/cargo-toml-vs-cargo-lock.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/cargo-toml-vs-cargo-lock.md new file mode 100644 index 000000000..574a6677f --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/cargo-toml-vs-cargo-lock.md @@ -0,0 +1,103 @@ +## Cargo.toml vs Cargo.lock + +`Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk +about them, here’s a summary: + +* `Cargo.toml` is about describing your dependencies in a broad sense, and is + written by you. +* `Cargo.lock` contains exact information about your dependencies. It is + maintained by Cargo and should not be manually edited. + +If you’re building a library that other projects will depend on, put +`Cargo.lock` in your `.gitignore`. If you’re building an executable like a +command-line tool or an application, check `Cargo.lock` into `git`. If you're +curious about why that is, see ["Why do binaries have `Cargo.lock` in version +control, but not libraries?" in the +FAQ](faq.html#why-do-binaries-have-cargolock-in-version-control-but-not-libraries). + +Let’s dig in a little bit more. + +`Cargo.toml` is a **manifest** file in which we can specify a bunch of +different metadata about our project. 
For example, we can say that we depend +on another project: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] + +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand.git" } +``` + +This project has a single dependency, on the `rand` library. We’ve stated in +this case that we’re relying on a particular Git repository that lives on +GitHub. Since we haven’t specified any other information, Cargo assumes that +we intend to use the latest commit on the `master` branch to build our project. + +Sound good? Well, there’s one problem: If you build this project today, and +then you send a copy to me, and I build this project tomorrow, something bad +could happen. There could be more commits to `rand` in the meantime, and my +build would include new commits while yours would not. Therefore, we would +get different builds. This would be bad because we want reproducible builds. + +We could fix this problem by putting a `rev` line in our `Cargo.toml`: + +```toml +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand.git", rev = "9f35b8e" } +``` + +Now our builds will be the same. But there’s a big drawback: now we have to +manually think about SHA-1s every time we want to update our library. This is +both tedious and error prone. + +Enter the `Cargo.lock`. Because of its existence, we don’t need to manually +keep track of the exact revisions: Cargo will do it for us. When we have a +manifest like this: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] + +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand.git" } +``` + +Cargo will take the latest commit and write that information out into our +`Cargo.lock` when we build for the first time. That file will look like this: + +```toml +[[package]] +name = "hello_world" +version = "0.1.0" +dependencies = [ + "rand 0.1.0 (git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9)", +] + +[[package]] +name = "rand" +version = "0.1.0" +source = "git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9" +``` + +You can see that there’s a lot more information here, including the exact +revision we used to build. Now when you give your project to someone else, +they’ll use the exact same SHA, even though we didn’t specify it in our +`Cargo.toml`. + +When we’re ready to opt in to a new version of the library, Cargo can +re-calculate the dependencies and update things for us: + +```shell +$ cargo update # updates all dependencies +$ cargo update -p rand # updates just “rand” +``` + +This will write out a new `Cargo.lock` with the new version information. Note +that the argument to `cargo update` is actually a +[Package ID Specification](reference/pkgid-spec.html) and `rand` is just a short +specification. 
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/continuous-integration.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/continuous-integration.md new file mode 100644 index 000000000..6e5efe72c --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/continuous-integration.md @@ -0,0 +1,21 @@ +## Continuous Integration + +### Travis CI + +To test your project on Travis CI, here is a sample `.travis.yml` file: + +```yaml +language: rust +rust: + - stable + - beta + - nightly +matrix: + allow_failures: + - rust: nightly +``` + +This will test all three release channels, but any breakage in nightly +will not fail your overall build. Please see the [Travis CI Rust +documentation](https://docs.travis-ci.com/user/languages/rust/) for more +information. diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/creating-a-new-project.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/creating-a-new-project.md new file mode 100644 index 000000000..3f0c90e3c --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/creating-a-new-project.md @@ -0,0 +1,89 @@ +## Creating a New Project + +To start a new project with Cargo, use `cargo new`: + +```shell +$ cargo new hello_world --bin +``` + +We’re passing `--bin` because we’re making a binary program: if we +were making a library, we’d leave it off. This also initializes a new `git` +repository by default. If you don't want it to do that, pass `--vcs none`. + +Let’s check out what Cargo has generated for us: + +```shell +$ cd hello_world +$ tree . +. +├── Cargo.toml +└── src + └── main.rs + +1 directory, 2 files +``` + +If we had just used `cargo new hello_world` without the `--bin` flag, then +we would have a `lib.rs` instead of a `main.rs`. For now, however, this is all +we need to get started. First, let’s check out `Cargo.toml`: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] +``` + +This is called a **manifest**, and it contains all of the metadata that Cargo +needs to compile your project. + +Here’s what’s in `src/main.rs`: + +```rust +fn main() { + println!("Hello, world!"); +} +``` + +Cargo generated a “hello world” for us. Let’s compile it: + +```shell +$ cargo build + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +And then run it: + +```shell +$ ./target/debug/hello_world +Hello, world! +``` + +We can also use `cargo run` to compile and then run it, all in one step (You +won't see the `Compiling` line if you have not made any changes since you last +compiled): + +```shell +$ cargo run + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) + Running `target/debug/hello_world` +Hello, world! +``` + +You’ll now notice a new file, `Cargo.lock`. It contains information about our +dependencies. Since we don’t have any yet, it’s not very interesting. + +Once you’re ready for release, you can use `cargo build --release` to compile +your files with optimizations turned on: + +```shell +$ cargo build --release + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +`cargo build --release` puts the resulting binary in `target/release` instead of +`target/debug`. + +Compiling in debug mode is the default for development-- compilation time is +shorter since the compiler doesn't do optimizations, but the code will run +slower. Release mode takes longer to compile, but the code will run faster. 
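+
+If you want to adjust these defaults, the compiler settings behind the two
+modes can be tweaked through the `[profile]` sections of `Cargo.toml`. As a
+minimal sketch (the values here are illustrative, not recommendations):
+
+```toml
+[profile.dev]
+opt-level = 1      # trade a little compile time for faster debug binaries
+
+[profile.release]
+debug = true       # keep debug symbols even in optimized builds
+```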
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/dependencies.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/dependencies.md
new file mode 100644
index 000000000..e199487f2
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/dependencies.md
@@ -0,0 +1,90 @@
+## Dependencies
+
+[crates.io] is the Rust community's central package registry that serves as a
+location to discover and download packages. `cargo` is configured to use it by
+default to find requested packages.
+
+To depend on a library hosted on [crates.io], add it to your `Cargo.toml`.
+
+[crates.io]: https://crates.io/
+
+### Adding a dependency
+
+If your `Cargo.toml` doesn't already have a `[dependencies]` section, add that,
+then list the crate name and version that you would like to use. This example
+adds a dependency of the `time` crate:
+
+```toml
+[dependencies]
+time = "0.1.12"
+```
+
+The version string is a [semver] version requirement. The [specifying
+dependencies](reference/specifying-dependencies.html) docs have more information about
+the options you have here.
+
+[semver]: https://github.com/steveklabnik/semver#requirements
+
+If we also wanted to add a dependency on the `regex` crate, we would not need
+to add `[dependencies]` for each crate listed. Here's what your whole
+`Cargo.toml` file would look like with dependencies on the `time` and `regex`
+crates:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+
+[dependencies]
+time = "0.1.12"
+regex = "0.1.41"
+```
+
+Re-run `cargo build`, and Cargo will fetch the new dependencies and all of
+their dependencies, compile them all, and update the `Cargo.lock`:
+
+```shell
+$ cargo build
+    Updating registry `https://github.com/rust-lang/crates.io-index`
+ Downloading memchr v0.1.5
+ Downloading libc v0.1.10
+ Downloading regex-syntax v0.2.1
+ Downloading memchr v0.1.5
+ Downloading aho-corasick v0.3.0
+ Downloading regex v0.1.41
+   Compiling memchr v0.1.5
+   Compiling libc v0.1.10
+   Compiling regex-syntax v0.2.1
+   Compiling memchr v0.1.5
+   Compiling aho-corasick v0.3.0
+   Compiling regex v0.1.41
+   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
+```
+
+Our `Cargo.lock` contains the exact information about which revision of all of
+these dependencies we used.
+
+Now, if `regex` gets updated, we will still build with the same revision until
+we choose to `cargo update`.
+
+You can now use the `regex` library using `extern crate` in `main.rs`.
+
+```rust
+extern crate regex;
+
+use regex::Regex;
+
+fn main() {
+    let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+    println!("Did our date match? {}", re.is_match("2014-01-01"));
+}
+```
+
+Running it will show:
+
+```shell
+$ cargo run
+     Running `target/hello_world`
+Did our date match? true
+```
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/index.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/index.md
new file mode 100644
index 000000000..d8bfda17c
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/index.md
@@ -0,0 +1,13 @@
+## Cargo Guide
+
+This guide will give you all that you need to know about how to use Cargo to
+develop Rust projects.
+ +* [Why Cargo Exists](guide/why-cargo-exists.html) +* [Creating a New Project](guide/creating-a-new-project.html) +* [Working on an Existing Cargo Project](guide/working-on-an-existing-project.html) +* [Dependencies](guide/dependencies.html) +* [Project Layout](guide/project-layout.html) +* [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.html) +* [Tests](guide/tests.html) +* [Continuous Integration](guide/continuous-integration.html) diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/project-layout.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/project-layout.md new file mode 100644 index 000000000..f9eb7d331 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/project-layout.md @@ -0,0 +1,35 @@ +## Project Layout + +Cargo uses conventions for file placement to make it easy to dive into a new +Cargo project: + +```shell +. +├── Cargo.lock +├── Cargo.toml +├── benches +│   └── large-input.rs +├── examples +│   └── simple.rs +├── src +│   ├── bin +│   │   └── another_executable.rs +│   ├── lib.rs +│   └── main.rs +└── tests + └── some-integration-tests.rs +``` + +* `Cargo.toml` and `Cargo.lock` are stored in the root of your project (*package + root*). +* Source code goes in the `src` directory. +* The default library file is `src/lib.rs`. +* The default executable file is `src/main.rs`. +* Other executables can be placed in `src/bin/*.rs`. +* Integration tests go in the `tests` directory (unit tests go in each file + they're testing). +* Examples go in the `examples` directory. +* Benchmarks go in the `benches` directory. + +These are explained in more detail in the [manifest +description](reference/manifest.html#the-project-layout). diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/tests.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/tests.md new file mode 100644 index 000000000..743a83f85 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/tests.md @@ -0,0 +1,39 @@ +## Tests + +Cargo can run your tests with the `cargo test` command. Cargo looks for tests +to run in two places: in each of your `src` files and any tests in `tests/`. +Tests in your `src` files should be unit tests, and tests in `tests/` should be +integration-style tests. As such, you’ll need to import your crates into +the files in `tests`. + +Here's an example of running `cargo test` in our project, which currently has +no tests: + +```shell +$ cargo test + Compiling rand v0.1.0 (https://github.com/rust-lang-nursery/rand.git#9f35b8e) + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) + Running target/test/hello_world-9c2b65bbb79eabce + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out +``` + +If our project had tests, we would see more output with the correct number of +tests. + +You can also run a specific test by passing a filter: + +```shell +$ cargo test foo +``` + +This will run any test with `foo` in its name. + +`cargo test` runs additional checks as well. For example, it will compile any +examples you’ve included and will also test the examples in your +documentation. Please see the [testing guide][testing] in the Rust +documentation for more details. 
+
+[testing]: https://doc.rust-lang.org/book/testing.html
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/why-cargo-exists.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/why-cargo-exists.md
new file mode 100644
index 000000000..9c5d0d2dd
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/why-cargo-exists.md
@@ -0,0 +1,12 @@
+## Why Cargo Exists
+
+Cargo is a tool that allows Rust projects to declare their various
+dependencies and ensure that you’ll always get a repeatable build.
+
+To accomplish this goal, Cargo does four things:
+
+* Introduces two metadata files with various bits of project information.
+* Fetches and builds your project’s dependencies.
+* Invokes `rustc` or another build tool with the correct parameters to build
+  your project.
+* Introduces conventions to make working with Rust projects easier.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/guide/working-on-an-existing-project.md b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/working-on-an-existing-project.md
new file mode 100644
index 000000000..97c032005
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/guide/working-on-an-existing-project.md
@@ -0,0 +1,22 @@
+## Working on an Existing Cargo Project
+
+If you download an existing project that uses Cargo, it’s really easy
+to get going.
+
+First, get the project from somewhere. In this example, we’ll use `rand`
+cloned from its repository on GitHub:
+
+```shell
+$ git clone https://github.com/rust-lang-nursery/rand.git
+$ cd rand
+```
+
+To build, use `cargo build`:
+
+```shell
+$ cargo build
+   Compiling rand v0.1.0 (file:///path/to/project/rand)
+```
+
+This will fetch all of the dependencies and then build them, along with the
+project.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/images/Cargo-Logo-Small.png b/collector/compile-benchmarks/cargo/src/doc/book/src/images/Cargo-Logo-Small.png
new file mode 100644
index 000000000..e3a99208c
Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/book/src/images/Cargo-Logo-Small.png differ
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/images/auth-level-acl.png b/collector/compile-benchmarks/cargo/src/doc/book/src/images/auth-level-acl.png
new file mode 100644
index 000000000..e7bc25180
Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/book/src/images/auth-level-acl.png differ
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/images/org-level-acl.png b/collector/compile-benchmarks/cargo/src/doc/book/src/images/org-level-acl.png
new file mode 100644
index 000000000..ed5aa882a
Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/book/src/images/org-level-acl.png differ
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/index.md b/collector/compile-benchmarks/cargo/src/doc/book/src/index.md
new file mode 100644
index 000000000..3de0fc137
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/index.md
@@ -0,0 +1,28 @@
+# The Cargo Manual
+
+![Cargo Logo](images/Cargo-Logo-Small.png)
+
+Cargo is the [Rust] *package manager*. Cargo downloads your Rust project’s
+dependencies, compiles your project, makes packages, and uploads them to
+[crates.io], the Rust *package registry*.
+
+
+### Sections
+
+**[Getting Started](getting-started.html)**
+
+To get started with Cargo, install Cargo (and Rust) and set up your first crate.
+
+**[Cargo Guide](guide/index.html)**
+
+The guide will give you all you need to know about how to use Cargo to develop
+Rust projects.
+
+**[Cargo Reference](reference/index.html)**
+
+The reference covers the details of various areas of Cargo.
+
+**[Frequently Asked Questions](faq.html)**
+
+[rust]: https://www.rust-lang.org/
+[crates.io]: https://crates.io/
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/build-scripts.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/build-scripts.md
new file mode 100644
index 000000000..2c6fe3d81
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/build-scripts.md
@@ -0,0 +1,556 @@
+## Build Scripts
+
+Some packages need to compile third-party non-Rust code, for example C
+libraries. Other packages need to link to C libraries which can either be
+located on the system or possibly need to be built from source. Others still
+need facilities for functionality such as code generation before building (think
+parser generators).
+
+Cargo does not aim to replace other tools that are well-optimized for
+these tasks, but it does integrate with them through the `build` configuration
+option.
+
+```toml
+[package]
+# ...
+build = "build.rs"
+```
+
+The Rust file designated by the `build` command (relative to the package root)
+will be compiled and invoked before anything else is compiled in the package,
+allowing your Rust code to depend on the built or generated artifacts. Note that
+if you do not specify a value for `build` but your package root does contain a
+`"build.rs"` file, Cargo will compile and invoke this file for you.
+
+Some example use cases of the build command are:
+
+* Building a bundled C library.
+* Finding a C library on the host system.
+* Generating a Rust module from a specification.
+* Performing any platform-specific configuration needed for the crate.
+
+Each of these use cases will be detailed in full below to give examples of how
+the build command works.
+
+### Inputs to the Build Script
+
+When the build script is run, there are a number of inputs to the build script,
+all passed in the form of [environment variables][env].
+
+In addition to environment variables, the build script’s current directory is
+the source directory of the build script’s package.
+
+[env]: reference/environment-variables.html
+
+### Outputs of the Build Script
+
+All the lines printed to stdout by a build script are written to a file like
+`target/debug/build/<pkg>/output` (the precise location may depend on your
+configuration). Any line that starts with `cargo:` is interpreted directly by
+Cargo. This line must be of the form `cargo:key=value`, like the examples below:
+
+```shell
+# specially recognized by Cargo
+cargo:rustc-link-lib=static=foo
+cargo:rustc-link-search=native=/path/to/foo
+cargo:rustc-cfg=foo
+cargo:rustc-env=FOO=bar
+# arbitrary user-defined metadata
+cargo:root=/path/to/foo
+cargo:libdir=/path/to/foo/lib
+cargo:include=/path/to/foo/include
+```
+
+On the other hand, lines printed to stderr are written to a file like
+`target/debug/build/<pkg>/stderr` but are not interpreted by cargo.
+
+There are a few special keys that Cargo recognizes, some affecting how the
+crate is built:
+
+* `rustc-link-lib=[KIND=]NAME` indicates that the specified value is a library
+  name and should be passed to the compiler as a `-l` flag. The optional `KIND`
+  can be one of `static`, `dylib` (the default), or `framework`; see
+  `rustc --help` for more details.
+* `rustc-link-search=[KIND=]PATH` indicates the specified value is a library
+  search path and should be passed to the compiler as a `-L` flag. The optional
+  `KIND` can be one of `dependency`, `crate`, `native`, `framework` or `all`
+  (the default); see `rustc --help` for more details.
+* `rustc-flags=FLAGS` is a set of flags passed to the compiler; only `-l` and
+  `-L` flags are supported.
+* `rustc-cfg=FEATURE` indicates that the specified feature will be passed as a
+  `--cfg` flag to the compiler. This is often useful for performing compile-time
+  detection of various features.
+* `rustc-env=VAR=VALUE` indicates that the specified environment variable
+  will be added to the environment which the compiler is run within.
+  The value can be then retrieved by the `env!` macro in the compiled crate.
+  This is useful for embedding additional metadata in the crate's code,
+  such as the hash of Git HEAD or the unique identifier of a continuous
+  integration server.
+* `rerun-if-changed=PATH` is a path to a file or directory which indicates that
+  the build script should be re-run if it changes (detected by a more-recent
+  last-modified timestamp on the file). Normally build scripts are re-run if
+  any file inside the crate root changes, but this can be used to scope changes
+  to just a small set of files. (If this path points to a directory, the entire
+  directory will not be traversed for changes -- only changes to the timestamp
+  of the directory itself (which corresponds to some types of changes within the
+  directory, depending on platform) will trigger a rebuild. To request a re-run
+  on any changes within an entire directory, print a line for the directory and
+  another line for everything inside it, recursively.)
+  Note that if the build script itself (or one of its dependencies) changes,
+  then it's rebuilt and rerun unconditionally, so
+  `cargo:rerun-if-changed=build.rs` is almost always redundant (unless you
+  want to ignore changes in all other files except for `build.rs`).
+* `rerun-if-env-changed=VAR` is the name of an environment variable which
+  indicates that if the environment variable's value changes, the build script
+  should be rerun. This basically behaves the same as `rerun-if-changed` except
+  that it works with environment variables instead. Note that the environment
+  variables here are intended for global environment variables like `CC` and
+  such; it's not necessary to use this for env vars like `TARGET` that Cargo
+  sets. Also note that if `rerun-if-env-changed` is printed out then Cargo will
+  *only* rerun the build script if those environment variables change or if
+  files printed out by `rerun-if-changed` change.
+
+* `warning=MESSAGE` is a message that will be printed to the main console after
+  a build script has finished running. Warnings are only shown for path
+  dependencies (that is, those you're working on locally), so for example
+  warnings printed out in crates.io crates are not emitted by default.
+
+Any other element is user-defined metadata that will be passed to
+dependents. More information about this can be found in the [`links`][links]
+section.
+
+[links]: #the-links-manifest-key
+
+### Build Dependencies
+
+Build scripts are also allowed to have dependencies on other Cargo-based crates.
+Dependencies are declared through the `build-dependencies` section of the
+manifest.
+
+```toml
+[build-dependencies]
+foo = { git = "https://github.com/your-packages/foo" }
+```
+
+The build script **does not** have access to the dependencies listed in the
+`dependencies` or `dev-dependencies` section (they’re not built yet!). Likewise,
+build dependencies are not available to the package itself unless they are also
+explicitly listed as regular dependencies.
+
+### The `links` Manifest Key
+
+In addition to the manifest key `build`, Cargo also supports a `links` manifest
+key to declare the name of a native library that is being linked to:
+
+```toml
+[package]
+# ...
+links = "foo"
+build = "build.rs"
+```
+
+This manifest states that the package links to the `libfoo` native library, and
+it also has a build script for locating and/or building the library. Cargo
+requires that a `build` command is specified if a `links` entry is also
+specified.
+
+The purpose of this manifest key is to give Cargo an understanding about the set
+of native dependencies that a package has, as well as providing a principled
+system of passing metadata between package build scripts.
+
+Primarily, Cargo requires that there is at most one package per `links` value.
+In other words, it’s forbidden to have two packages link to the same native
+library. Note, however, that there are [conventions in place][star-sys] to
+alleviate this.
+
+[star-sys]: #-sys-packages
+
+As mentioned above in the output format, each build script can generate an
+arbitrary set of metadata in the form of key-value pairs. This metadata is
+passed to the build scripts of **dependent** packages. For example, if `libbar`
+depends on `libfoo`, then if `libfoo` generates `key=value` as part of its
+metadata, then the build script of `libbar` will have the environment variables
+`DEP_FOO_KEY=value`.
+
+Note that metadata is only passed to immediate dependents, not transitive
+dependents. The motivation for this metadata passing is outlined in the linking
+to system libraries case study below.
+
+### Overriding Build Scripts
+
+If a manifest contains a `links` key, then Cargo supports overriding the build
+script specified with a custom library. The purpose of this functionality is to
+prevent running the build script in question altogether and instead supply the
+metadata ahead of time.
+
+To override a build script, place the following configuration in any acceptable
+Cargo [configuration location](reference/config.html).
+
+```toml
+[target.x86_64-unknown-linux-gnu.foo]
+rustc-link-search = ["/path/to/foo"]
+rustc-link-lib = ["foo"]
+root = "/path/to/foo"
+key = "value"
+```
+
+This section states that for the target `x86_64-unknown-linux-gnu` the library
+named `foo` has the metadata specified. This metadata is the same as the
+metadata generated as if the build script had run, providing a number of
+key/value pairs where the `rustc-flags`, `rustc-link-search`, and
+`rustc-link-lib` keys are slightly special.
+
+With this configuration, if a package declares that it links to `foo` then the
+build script will **not** be compiled or run, and the metadata specified will
+instead be used.
+
+### Case study: Code generation
+
+Some Cargo packages need to have code generated just before they are compiled
+for various reasons. Here we’ll walk through a simple example which generates a
+library function as part of the build script.
+
+First, let’s take a look at the directory structure of this package:
+
+```shell
+.
+├── Cargo.toml
+├── build.rs
+└── src
+    └── main.rs
+
+1 directory, 3 files
+```
+
+Here we can see that we have a `build.rs` build script and our binary in
+`main.rs`. Next, let’s take a look at the manifest:
+
+```toml
+# Cargo.toml
+
+[package]
+name = "hello-from-generated-code"
+version = "0.1.0"
+authors = ["you@example.com"]
+build = "build.rs"
+```
+
+Here we can see we’ve got a build script specified which we’ll use to generate
+some code. Let’s see what’s inside the build script:
+
+```rust,no_run
+// build.rs
+
+use std::env;
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+
+fn main() {
+    let out_dir = env::var("OUT_DIR").unwrap();
+    let dest_path = Path::new(&out_dir).join("hello.rs");
+    let mut f = File::create(&dest_path).unwrap();
+
+    f.write_all(b"
+        pub fn message() -> &'static str {
+            \"Hello, World!\"
+        }
+    ").unwrap();
+}
+```
+
+There are a couple of points of note here:
+
+* The script uses the `OUT_DIR` environment variable to discover where the
+  output files should be located. It can use the process’ current working
+  directory to find where the input files should be located, but in this case we
+  don’t have any input files.
+* This script is relatively simple as it just writes out a small generated file.
+  One could imagine that other more fanciful operations could take place such as
+  generating a Rust module from a C header file or another language definition,
+  for example.
+
+Next, let’s peek at the library itself:
+
+```rust,ignore
+// src/main.rs
+
+include!(concat!(env!("OUT_DIR"), "/hello.rs"));
+
+fn main() {
+    println!("{}", message());
+}
+```
+
+This is where the real magic happens. The library is using the rustc-defined
+`include!` macro in combination with the `concat!` and `env!` macros to include
+the generated file (`hello.rs`) into the crate’s compilation.
+
+Using the structure shown here, crates can include any number of generated files
+from the build script itself.
+
+### Case study: Building some native code
+
+Sometimes it’s necessary to build some native C or C++ code as part of a
+package. This is another excellent use case of leveraging the build script to
+build a native library before the Rust crate itself. As an example, we’ll create
+a Rust library which calls into C to print “Hello, World!”.
+
+Like above, let’s first take a look at the project layout:
+
+```shell
+.
+├── Cargo.toml
+├── build.rs
+└── src
+    ├── hello.c
+    └── main.rs
+
+1 directory, 4 files
+```
+
+Pretty similar to before! Next, the manifest:
+
+```toml
+# Cargo.toml
+
+[package]
+name = "hello-world-from-c"
+version = "0.1.0"
+authors = ["you@example.com"]
+build = "build.rs"
+```
+
+For now we’re not going to use any build dependencies, so let’s take a look at
+the build script now:
+
+```rust,no_run
+// build.rs
+
+use std::process::Command;
+use std::env;
+use std::path::Path;
+
+fn main() {
+    let out_dir = env::var("OUT_DIR").unwrap();
+
+    // note that there are a number of downsides to this approach; the comments
+    // below detail how to improve the portability of these commands.
+    Command::new("gcc").args(&["src/hello.c", "-c", "-fPIC", "-o"])
+                       .arg(&format!("{}/hello.o", out_dir))
+                       .status().unwrap();
+    Command::new("ar").args(&["crus", "libhello.a", "hello.o"])
+                      .current_dir(&Path::new(&out_dir))
+                      .status().unwrap();
+
+    println!("cargo:rustc-link-search=native={}", out_dir);
+    println!("cargo:rustc-link-lib=static=hello");
+}
+```
+
+This build script starts out by compiling our C file into an object file (by
+invoking `gcc`) and then converting this object file into a static library (by
+invoking `ar`). The final step is to feed information back to Cargo itself,
+saying that our output was in `out_dir` and that the compiler should link the
+crate to `libhello.a` statically via the `-l static=hello` flag.
+
+Note that there are a number of drawbacks to this hardcoded approach:
+
+* The `gcc` command itself is not portable across platforms. For example it’s
+  unlikely that Windows platforms have `gcc`, and not even all Unix platforms
+  may have `gcc`. The `ar` command is also in a similar situation.
+* These commands do not take cross-compilation into account. If we’re
+  cross-compiling for a platform such as Android it’s unlikely that `gcc` will
+  produce an ARM executable.
+
+Not to fear, though: this is where a `build-dependencies` entry would help! The
+Cargo ecosystem has a number of packages to make this sort of task much easier,
+more portable, and more standardized. For example, the build script could be
+written as:
+
+```rust,ignore
+// build.rs
+
+// Bring in a dependency on an externally maintained `gcc` package which manages
+// invoking the C compiler.
+extern crate gcc;
+
+fn main() {
+    gcc::compile_library("libhello.a", &["src/hello.c"]);
+}
+```
+
+Add a build-time dependency on the `gcc` crate with the following addition to
+your `Cargo.toml`:
+
+```toml
+[build-dependencies]
+gcc = "0.3"
+```
+
+The [`gcc` crate](https://crates.io/crates/gcc) abstracts a range of build
+script requirements for C code:
+
+* It invokes the appropriate compiler (MSVC for Windows, `gcc` for MinGW, `cc`
+  for Unix platforms, etc.).
+* It takes the `TARGET` variable into account by passing appropriate flags to
+  the compiler being used.
+* Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all
+  handled automatically.
+* The stdout output and `OUT_DIR` locations are also handled by the `gcc`
+  library.
+
+Here we can start to see some of the major benefits of farming as much
+functionality as possible out to common build dependencies rather than
+duplicating logic across all build scripts!
+
+Back to the case study though, let’s take a quick look at the contents of the
+`src` directory:
+
+```c
+// src/hello.c
+
+#include <stdio.h>
+
+void hello() {
+    printf("Hello, World!\n");
+}
+```
+
+```rust,ignore
+// src/main.rs
+
+// Note the lack of the `#[link]` attribute. We’re delegating the responsibility
+// of selecting what to link to over to the build script rather than hardcoding
+// it in the source file.
+extern { fn hello(); }
+
+fn main() {
+    unsafe { hello(); }
+}
+```
+
+And there we go! This should complete our example of building some C code from a
+Cargo package using the build script itself. This also shows why using a build
+dependency can be crucial in many situations, and can even make the script much
+more concise!
+
+We’ve also seen a brief example of how a build script can use a crate as a
+dependency purely for the build process and not for the crate itself at runtime.
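+
+One optional refinement (a sketch, not part of the original example): using the
+`rerun-if-changed` key described earlier, the build script can be scoped to
+rerun only when the C source actually changes, rather than on any change in the
+package:
+
+```rust,ignore
+// build.rs (excerpt)
+fn main() {
+    // Only rerun this build script when src/hello.c changes.
+    println!("cargo:rerun-if-changed=src/hello.c");
+    // ... compile and link as shown above ...
+}
+```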
+
+### Case study: Linking to system libraries
+
+The final case study here will be investigating how a Cargo library links to a
+system library and how the build script is leveraged to support this use case.
+
+Quite frequently a Rust crate wants to link to a native library provided by the
+system, either to bind its functionality or just to use it as part of an
+implementation detail. This is quite a nuanced problem when it comes to
+performing this in a platform-agnostic fashion, and the purpose of a build
+script is again to farm out as much of this work as possible, making things
+easy for consumers.
+
+As an example to follow, let’s take a look at one of [Cargo’s own
+dependencies][git2-rs], [libgit2][libgit2]. The C library has a number of
+constraints:
+
+[git2-rs]: https://github.com/alexcrichton/git2-rs/tree/master/libgit2-sys
+[libgit2]: https://github.com/libgit2/libgit2
+
+* It has an optional dependency on OpenSSL on Unix to implement the https
+  transport.
+* It has an optional dependency on libssh2 on all platforms to implement the ssh
+  transport.
+* It is often not installed on all systems by default.
+* It can be built from source using `cmake`.
+
+To visualize what’s going on here, let’s take a look at the manifest for the
+relevant Cargo package that links to the native C library.
+
+```toml
+[package]
+name = "libgit2-sys"
+version = "0.1.0"
+authors = ["..."]
+links = "git2"
+build = "build.rs"
+
+[dependencies]
+libssh2-sys = { git = "https://github.com/alexcrichton/ssh2-rs" }
+
+[target.'cfg(unix)'.dependencies]
+openssl-sys = { git = "https://github.com/alexcrichton/openssl-sys" }
+
+# ...
+```
+
+As the above manifest shows, we’ve got a `build` script specified, but it’s
+worth noting that this example has a `links` entry which indicates that the
+crate (`libgit2-sys`) links to the `git2` native library.
+
+Here we also see that we chose to give the Rust crate an unconditional
+dependency on `libssh2` via the `libssh2-sys` crate, as well as a
+platform-specific dependency on `openssl-sys` for \*nix (other variants elided
+for now). It may seem a little counterintuitive to express *C dependencies* in
+the *Cargo manifest*, but this is actually using one of Cargo’s conventions in
+this space.
+
+### `*-sys` Packages
+
+To alleviate linking to system libraries, Cargo has a *convention* of package
+naming and functionality. Any package named `foo-sys` will provide two major
+pieces of functionality:
+
+* The library crate will link to the native library `libfoo`. This will often
+  probe the current system for `libfoo` before resorting to building from
+  source.
+* The library crate will provide **declarations** for functions in `libfoo`,
+  but it does **not** provide bindings or higher-level abstractions.
+
+The set of `*-sys` packages provides a common set of dependencies for linking
+to native libraries. There are a number of benefits earned from having this
+convention of native-library-related packages:
+
+* Common dependencies on `foo-sys` alleviate the above rule about one package
+  per value of `links`.
+* A common dependency allows centralizing logic on discovering `libfoo` itself
+  (or building it from source).
+* These dependencies are easily overridable.
+
+### Building libgit2
+
+Now that we’ve got libgit2’s dependencies sorted out, we need to actually write
+the build script.
We’re not going to look at specific snippets of code here and
+instead only take a look at the high-level details of the build script of
+`libgit2-sys`. This is not a recommendation that all packages follow this
+strategy, but rather an outline of one specific approach.
+
+The first thing the build script should do is query whether libgit2 is
+already installed on the host system. To do this we’ll leverage the preexisting
+tool `pkg-config` (when it’s available). We’ll also use a `build-dependencies`
+section to refactor out all the `pkg-config` related code (or someone’s already
+done that!).
+
+If `pkg-config` fails to find libgit2, or if `pkg-config` just isn’t
+installed, the next step is to build libgit2 from bundled source code
+(distributed as part of `libgit2-sys` itself). There are a few nuances when
+doing so that we need to take into account, however:
+
+* The build system of libgit2, `cmake`, needs to be able to find libgit2’s
+  optional dependency of libssh2. We’re sure we’ve already built it (it’s a
+  Cargo dependency); we just need to communicate this information. To do this
+  we leverage the metadata format to communicate information between build
+  scripts. In this example the libssh2 package printed out `cargo:root=...` to
+  tell us where libssh2 is installed, and we can then pass this along to
+  cmake with the `CMAKE_PREFIX_PATH` environment variable.
+
+* We’ll need to handle some `CFLAGS` values when compiling C code (and tell
+  `cmake` about this). Some flags we may want to pass are `-m64` for 64-bit
+  code, `-m32` for 32-bit code, or `-fPIC` for 64-bit code as well.
+
+* Finally, we’ll invoke `cmake` to place all output into the directory given by
+  the `OUT_DIR` environment variable, and then we’ll print the necessary
+  metadata to instruct rustc how to link to libgit2.
+
+Most of the functionality of this build script is easily refactorable into
+common dependencies, so our build script isn’t quite as intimidating as this
+description makes it sound! In reality it’s expected that build scripts are
+quite succinct, farming logic such as the above out to build dependencies.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/config.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/config.md
new file mode 100644
index 000000000..047853eec
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/config.md
@@ -0,0 +1,139 @@
+## Configuration
+
+This document will explain how Cargo’s configuration system works, as well as
+the available configuration keys. For configuration of a project through its
+manifest, see the [manifest format](reference/manifest.html).
+
+### Hierarchical structure
+
+
+Cargo allows local configuration for a particular project as well as global
+configuration, like git. Cargo extends this to a hierarchical strategy.
+If, for example, Cargo were invoked in `/projects/foo/bar/baz`, then the
+following configuration files would be probed for and unified in this order:
+
+* `/projects/foo/bar/baz/.cargo/config`
+* `/projects/foo/bar/.cargo/config`
+* `/projects/foo/.cargo/config`
+* `/projects/.cargo/config`
+* `/.cargo/config`
+* `$HOME/.cargo/config`
+
+With this structure, you can specify configuration per-project, and even
+possibly check it into version control. You can also specify personal defaults
+with a configuration file in your home directory.
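+
+As a hypothetical illustration of this merging (the paths and values below are
+made up for the example), a project-level file might hold settings you want to
+share and possibly check in, while your home directory holds personal defaults;
+both apply when Cargo runs inside the project:
+
+```toml
+# /projects/foo/.cargo/config -- project-level, can be checked in
+[alias]
+ci = "test"
+
+# $HOME/.cargo/config -- personal defaults
+[cargo-new]
+name = "Your Name"
+email = "you@example.com"
+```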
+
+### Configuration format
+
+All configuration is currently in the [TOML format][toml] (like the manifest),
+with simple key-value pairs inside of sections (tables) which all get merged
+together.
+
+[toml]: https://github.com/toml-lang/toml
+
+### Configuration keys
+
+All of the following keys are optional, and their defaults are listed as their
+value unless otherwise noted.
+
+Key values that specify a tool may be given as an absolute path, a relative path
+or as a pathless tool name. Absolute paths and pathless tool names are used as
+given. Relative paths are resolved relative to the parent directory of the
+`.cargo` directory of the config file that the value resides within.
+
+```toml
+# An array of paths to local repositories which are to be used as overrides for
+# dependencies. For more information see the Specifying Dependencies guide.
+paths = ["/path/to/override"]
+
+[cargo-new]
+# This is your name/email to place in the `authors` section of a new Cargo.toml
+# that is generated. If not present, then `git` will be probed, and if that is
+# not present then `$USER` and `$EMAIL` will be used.
+name = "..."
+email = "..."
+
+# By default `cargo new` will initialize a new Git repository. This key can be
+# set to `hg` to create a Mercurial repository, or `none` to disable this
+# behavior.
+vcs = "none"
+
+# For the following sections, $triple refers to any valid target triple, not the
+# literal string "$triple", and it will apply whenever that target triple is
+# being compiled to. 'cfg(...)' refers to the Rust-like `#[cfg]` syntax for
+# conditional compilation.
+[target.$triple]
+# This is the linker which is passed to rustc (via `-C linker=`) when compiling
+# for `$triple`. By default this flag is not passed to the compiler.
+linker = ".."
+# Same but for the library archiver which is passed to rustc via `-C ar=`.
+ar = ".."
+# If a runner is provided, compiled targets for the `$triple` will be executed
+# by invoking the specified runner executable with the actual target as the
+# first argument. This applies to `cargo run`, `cargo test` and `cargo bench`
+# commands. By default compiled targets are executed directly.
+runner = ".."
+# custom flags to pass to all compiler invocations that target $triple
+# this value overrides build.rustflags when both are present
+rustflags = ["..", ".."]
+
+[target.'cfg(...)']
+# Similar to the $triple configuration, but using the `cfg` syntax.
+# If several `cfg` and $triple targets are candidates, then the rustflags
+# are concatenated. The `cfg` syntax only applies to rustflags, and not to
+# linker.
+rustflags = ["..", ".."]
+
+# Configuration keys related to the registry
+[registry]
+index = "..." # URL of the registry index (defaults to the central repository)
+token = "..." # Access token (found on the central repo’s website)
+
+[http]
+proxy = "host:port" # HTTP proxy to use for HTTP requests (defaults to none)
+                    # in libcurl format, e.g.
"socks5h://host:port" +timeout = 60000 # Timeout for each HTTP request, in milliseconds +cainfo = "cert.pem" # Path to Certificate Authority (CA) bundle (optional) +check-revoke = true # Indicates whether SSL certs are checked for revocation + +[build] +jobs = 1 # number of parallel jobs, defaults to # of CPUs +rustc = "rustc" # the rust compiler tool +rustdoc = "rustdoc" # the doc generator tool +target = "triple" # build for the target triple +target-dir = "target" # path of where to place all generated artifacts +rustflags = ["..", ".."] # custom flags to pass to all compiler invocations + +[term] +verbose = false # whether cargo provides verbose output +color = 'auto' # whether cargo colorizes output + +# Network configuration +[net] +retry = 2 # number of times a network call will automatically retried + +# Alias cargo commands. The first 3 aliases are built in. If your +# command requires grouped whitespace use the list format. +[alias] +b = "build" +t = "test" +r = "run" +rr = "run --release" +space_example = ["run", "--release", "--", "\"command list\""] +``` + +### Environment variables + +Cargo can also be configured through environment variables in addition to the +TOML syntax above. For each configuration key above of the form `foo.bar` the +environment variable `CARGO_FOO_BAR` can also be used to define the value. For +example the `build.jobs` key can also be defined by `CARGO_BUILD_JOBS`. + +Environment variables will take precedent over TOML configuration, and currently +only integer, boolean, and string keys are supported to be defined by +environment variables. + +In addition to the system above, Cargo recognizes a few other specific +[environment variables][env]. + +[env]: reference/environment-variables.html diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/environment-variables.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/environment-variables.md new file mode 100644 index 000000000..e95b79d6c --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/environment-variables.md @@ -0,0 +1,130 @@ +## Environment Variables + +Cargo sets and reads a number of environment variables which your code can detect +or override. Here is a list of the variables Cargo sets, organized by when it interacts +with them: + +### Environment variables Cargo reads + +You can override these environment variables to change Cargo's behavior on your +system: + +* `CARGO_HOME` - Cargo maintains a local cache of the registry index and of git + checkouts of crates. By default these are stored under `$HOME/.cargo`, but + this variable overrides the location of this directory. Once a crate is cached + it is not removed by the clean command. +* `CARGO_TARGET_DIR` - Location of where to place all generated artifacts, + relative to the current working directory. +* `RUSTC` - Instead of running `rustc`, Cargo will execute this specified + compiler instead. +* `RUSTC_WRAPPER` - Instead of simply running `rustc`, Cargo will execute this + specified wrapper instead, passing as its commandline arguments the rustc + invocation, with the first argument being rustc. +* `RUSTDOC` - Instead of running `rustdoc`, Cargo will execute this specified + `rustdoc` instance instead. +* `RUSTDOCFLAGS` - A space-separated list of custom flags to pass to all `rustdoc` + invocations that Cargo performs. In contrast with `cargo rustdoc`, this is + useful for passing a flag to *all* `rustdoc` instances. 
+* `RUSTFLAGS` - A space-separated list of custom flags to pass to all compiler
+  invocations that Cargo performs. In contrast with `cargo rustc`, this is
+  useful for passing a flag to *all* compiler instances.
+
+Note that Cargo will also read environment variables for `.cargo/config`
+configuration values, as described in [that documentation][config-env].
+
+[config-env]: reference/config.html#environment-variables
+
+### Environment variables Cargo sets for crates
+
+Cargo exposes these environment variables to your crate when it is compiled.
+Note that this applies for test binaries as well.
+To get the value of any of these variables in a Rust program, do this:
+
+```rust
+let version = env!("CARGO_PKG_VERSION");
+```
+
+`version` will now contain the value of `CARGO_PKG_VERSION`.
+
+* `CARGO` - Path to the `cargo` binary performing the build.
+* `CARGO_MANIFEST_DIR` - The directory containing the manifest of your package.
+* `CARGO_PKG_VERSION` - The full version of your package.
+* `CARGO_PKG_VERSION_MAJOR` - The major version of your package.
+* `CARGO_PKG_VERSION_MINOR` - The minor version of your package.
+* `CARGO_PKG_VERSION_PATCH` - The patch version of your package.
+* `CARGO_PKG_VERSION_PRE` - The pre-release version of your package.
+* `CARGO_PKG_AUTHORS` - Colon-separated list of authors from the manifest of your package.
+* `CARGO_PKG_NAME` - The name of your package.
+* `CARGO_PKG_DESCRIPTION` - The description of your package.
+* `CARGO_PKG_HOMEPAGE` - The home page of your package.
+* `OUT_DIR` - If the package has a build script, this is set to the folder where the build
+  script should place its output. See below for more information.
+
+### Environment variables Cargo sets for build scripts
+
+Cargo sets several environment variables when build scripts are run. Because these variables
+are not yet set when the build script is compiled, the above example using `env!` won't work
+and instead you'll need to retrieve the values when the build script is run:
+
+```rust
+use std::env;
+let out_dir = env::var("OUT_DIR").unwrap();
+```
+
+`out_dir` will now contain the value of `OUT_DIR`.
+
+* `CARGO_MANIFEST_DIR` - The directory containing the manifest for the package
+                         being built (the package containing the build
+                         script). Also note that this is the value of the
+                         current working directory of the build script when it
+                         starts.
+* `CARGO_MANIFEST_LINKS` - the manifest `links` value.
+* `CARGO_FEATURE_<name>` - For each activated feature of the package being
+                           built, this environment variable will be present
+                           where `<name>` is the name of the feature uppercased
+                           and having `-` translated to `_`.
+* `CARGO_CFG_<cfg>` - For each [configuration option][configuration] of the
+                      package being built, this environment variable will
+                      contain the value of the configuration, where `<cfg>` is
+                      the name of the configuration uppercased and having `-`
+                      translated to `_`.
+                      Boolean configurations are present if they are set, and
+                      not present otherwise.
+                      Configurations with multiple values are joined to a
+                      single variable with the values delimited by `,`.
+* `OUT_DIR` - the folder in which all output should be placed. This folder is
+              inside the build directory for the package being built, and it is
+              unique for the package in question.
+* `TARGET` - the target triple that is being compiled for. Native code should be
+             compiled for this triple. Some more information about target
+             triples can be found in [clang’s own documentation][clang].
+* `HOST` - the host triple of the rust compiler.
+* `NUM_JOBS` - the parallelism specified as the top-level parallelism. This can
+               be useful to pass a `-j` parameter to a system like `make`. Note
+               that care should be taken when interpreting this environment
+               variable. For historical purposes this is still provided, but
+               recent versions of Cargo, for example, do not need to run `make
+               -j` as it'll automatically happen. Cargo implements its own
+               [jobserver] and will allow build scripts to inherit this
+               information, so programs compatible with GNU make jobservers will
+               already have appropriately configured parallelism.
+* `OPT_LEVEL`, `DEBUG` - values of the corresponding variables for the
+                         profile currently being built.
+* `PROFILE` - `release` for release builds, `debug` for other builds.
+* `DEP_<name>_<key>` - For more information about this set of environment
+                       variables, see build script documentation about
+                       [`links`][links].
+* `RUSTC`, `RUSTDOC` - the compiler and documentation generator that Cargo has
+                       resolved to use, passed to the build script so it might
+                       use it as well.
+
+[links]: reference/build-scripts.html#the-links-manifest-key
+[profile]: reference/manifest.html#the-profile-sections
+[configuration]: https://doc.rust-lang.org/reference/attributes.html#conditional-compilation
+[clang]: http://clang.llvm.org/docs/CrossCompilation.html#target-triple
+
+### Environment variables Cargo sets for 3rd party subcommands
+
+Cargo exposes this environment variable to 3rd party subcommands
+(i.e. programs named `cargo-foobar` placed in `$PATH`):
+
+* `CARGO` - Path to the `cargo` binary performing the build.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/external-tools.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/external-tools.md
new file mode 100644
index 000000000..0ba2c5186
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/external-tools.md
@@ -0,0 +1,103 @@
+## External tools
+
+One of the goals of Cargo is simple integration with third-party tools, like
+IDEs and other build systems. To make integration easier, Cargo has several
+facilities:
+
+* a `cargo metadata` command, which outputs project structure and dependencies
+  information in JSON,
+
+* a `--message-format` flag, which outputs information about a particular build,
+  and
+
+* support for custom subcommands.
+
+
+### Information about project structure
+
+You can use the `cargo metadata` command to get information about project
+structure and dependencies. The output of the command looks like this:
+
+```text
+{
+  // Integer version number of the format.
+  "version": integer,
+
+  // List of packages for this workspace, including dependencies.
+  "packages": [
+    {
+      // Opaque package identifier.
+      "id": PackageId,
+
+      "name": string,
+
+      "version": string,
+
+      "source": SourceId,
+
+      // A list of declared dependencies; see the `resolve` field for actual dependencies.
+      "dependencies": [ Dependency ],
+
+      "targets": [ Target ],
+
+      // Path to Cargo.toml
+      "manifest_path": string,
+    }
+  ],
+
+  "workspace_members": [ PackageId ],
+
+  // Dependencies graph.
+  "resolve": {
+    "nodes": [
+      {
+        "id": PackageId,
+        "dependencies": [ PackageId ]
+      }
+    ]
+  }
+}
+```
+
+The format is stable and versioned. When calling `cargo metadata`, you should
+pass the `--format-version` flag explicitly to avoid a forward-incompatibility
+hazard.
+
+If you are using Rust, there is the [cargo_metadata] crate.
+
+[cargo_metadata]: https://crates.io/crates/cargo_metadata
+
+
+### Information about build
+
+When passing `--message-format=json`, Cargo will output the following
+information during the build:
+
+* compiler errors and warnings,
+
+* produced artifacts,
+
+* results of the build scripts (for example, native dependencies).
+
+The output goes to stdout, as one JSON object per line. The `reason` field
+distinguishes different kinds of messages.
+
+Information about dependencies in the Makefile-compatible format is stored in
+the `.d` files alongside the artifacts.
+
+
+### Custom subcommands
+
+Cargo is designed to be extensible with new subcommands without having to modify
+Cargo itself. This is achieved by translating a cargo invocation of the form
+`cargo (?<command>[^ ]+)` into an invocation of an external tool
+`cargo-${command}` that then needs to be present in one of the user's `$PATH`
+directories.
+
+A custom subcommand may use the `CARGO` environment variable to call back to
+Cargo. Alternatively, it can link to the `cargo` crate as a library, but this
+approach has drawbacks:
+
+* Cargo as a library is unstable: the API changes without deprecation,
+
+* the versions of the Cargo library and the Cargo binary may differ.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/index.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/index.md
new file mode 100644
index 000000000..4fec36165
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/index.md
@@ -0,0 +1,16 @@
+## Cargo Reference
+
+Now that you have an overview of how to use Cargo and have created your first
+crate, you may be interested in more details in the following areas.
+
+The reference covers the details of various areas of Cargo.
+
+* [Specifying Dependencies](reference/specifying-dependencies.html)
+* [The Manifest Format](reference/manifest.html)
+* [Configuration](reference/config.html)
+* [Environment Variables](reference/environment-variables.html)
+* [Build Scripts](reference/build-scripts.html)
+* [Publishing on crates.io](reference/publishing.html)
+* [Package ID Specifications](reference/pkgid-spec.html)
+* [Source Replacement](reference/source-replacement.html)
+* [External Tools](reference/external-tools.html)
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/manifest.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/manifest.md
new file mode 100644
index 000000000..6a79c7a1b
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/manifest.md
@@ -0,0 +1,762 @@
+## The Manifest Format
+
+The `Cargo.toml` file for each package is called its *manifest*. Every manifest
+file consists of one or more sections.
+
+### The `[package]` section
+
+The first section in a `Cargo.toml` is `[package]`.
+
+```toml
+[package]
+name = "hello_world" # the name of the package
+version = "0.1.0"    # the current version, obeying semver
+authors = ["you@example.com"]
+```
+
+All three of these fields are mandatory.
+
+#### The `version` field
+
+Cargo bakes in the concept of [Semantic
+Versioning](http://semver.org/), so make sure you follow some basic rules:
+
+* Before you reach 1.0.0, anything goes, but if you make breaking changes,
+  increment the minor version. In Rust, breaking changes include adding fields to
+  structs or variants to enums.
+* After 1.0.0, only make breaking changes when you increment the major version.
+  Don’t break the build.
+* After 1.0.0, don’t add any new public API (no new `pub` anything) in tiny
+  versions. Always increment the minor version if you add any new `pub` structs,
+  traits, fields, types, functions, methods or anything else.
+* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0.
+
+#### The `build` field (optional)
+
+This field specifies a file in the project root which is a [build script][1] for
+building native code. More information can be found in the build script
+[guide][1].
+
+[1]: reference/build-scripts.html
+
+```toml
+[package]
+# ...
+build = "build.rs"
+```
+
+#### The `documentation` field (optional)
+
+This field specifies a URL to a website hosting the crate's documentation.
+If no URL is specified in the manifest file, [crates.io][cratesio] will
+automatically link your crate to the corresponding [docs.rs][docsrs] page.
+
+Documentation links from specific hosts are blacklisted. Hosts are added
+to the blacklist if they are known to not be hosting documentation and are
+possibly of malicious intent, e.g. ad tracking networks. URLs from the
+following hosts are blacklisted:
+
+* rust-ci.org
+
+Documentation URLs from blacklisted hosts will not appear on crates.io, and
+may be replaced by docs.rs links.
+
+[docsrs]: https://docs.rs/
+[cratesio]: https://crates.io/
+
+#### The `exclude` and `include` fields (optional)
+
+You can explicitly specify to Cargo that a set of [globs][globs] should be
+ignored or included for the purposes of packaging and rebuilding a package. The
+globs specified in the `exclude` field identify a set of files that are not
+included when a package is published as well as ignored for the purposes of
+detecting when to rebuild a package, and the globs in `include` specify files
+that are explicitly included.
+
+If a VCS is being used for a package, the `exclude` field will be seeded with
+the VCS’ ignore settings (`.gitignore` for git for example).
+
+```toml
+[package]
+# ...
+exclude = ["build/**/*.o", "doc/**/*.html"]
+```
+
+```toml
+[package]
+# ...
+include = ["src/**/*", "Cargo.toml"]
+```
+
+The options are mutually exclusive: setting `include` will override an
+`exclude`. Note that `include` must be an exhaustive list of files, as otherwise
+necessary source files may not be included.
+
+[globs]: http://doc.rust-lang.org/glob/glob/struct.Pattern.html
+
+#### Migrating to `gitignore`-like pattern matching
+
+The current interpretation of these configs is based on UNIX Globs, as
+implemented in the [`glob` crate](https://crates.io/crates/glob). We want
+Cargo's `include` and `exclude` configs to work as similar to `gitignore` as
+possible. [The `gitignore` specification](https://git-scm.com/docs/gitignore) is
+also based on Globs, but has a bunch of additional features that enable easier
+pattern writing and more control. Therefore, we are migrating the interpretation
+for the rules of these configs to use the [`ignore`
+crate](https://crates.io/crates/ignore), and treat each rule as a single
+line in a `gitignore` file. See [the tracking
+issue](https://github.com/rust-lang/cargo/issues/4268) for more details on the
+migration.
+
+#### The `publish` field (optional)
+
+The `publish` field can be used to prevent a package from being published to a
+package registry (like *crates.io*) by mistake.
+
+```toml
+[package]
+# ...
+publish = false
+```
+
+#### The `workspace` field (optional)
+
+The `workspace` field can be used to configure the workspace that this package
+will be a member of.
If not specified, this will be inferred as the first
+`Cargo.toml` with a `[workspace]` section upwards in the filesystem.
+
+```toml
+[package]
+# ...
+workspace = "path/to/workspace/root"
+```
+
+For more information, see the documentation for the workspace table below.
+
+#### Package metadata
+
+There are a number of optional metadata fields also accepted under the
+`[package]` section:
+
+```toml
+[package]
+# ...
+
+# A short blurb about the package. This is not rendered in any format when
+# uploaded to crates.io (aka this is not markdown).
+description = "..."
+
+# These URLs point to more information about the package. These are
+# intended to be webviews of the relevant data, not necessarily compatible
+# with VCS tools and the like.
+documentation = "..."
+homepage = "..."
+repository = "..."
+
+# This points to a file under the package root (relative to this `Cargo.toml`).
+# The contents of this file are stored and indexed in the registry.
+readme = "..."
+
+# This is a list of up to five keywords that describe this crate. Keywords
+# are searchable on crates.io, and you may choose any words that would
+# help someone find this crate.
+keywords = ["...", "..."]
+
+# This is a list of up to five categories where this crate would fit.
+# Categories are a fixed list available at crates.io/category_slugs, and
+# they must match exactly.
+categories = ["...", "..."]
+
+# This is a string description of the license for this package. Currently
+# crates.io will validate the license provided against a whitelist of known
+# license identifiers from http://spdx.org/licenses/. Multiple licenses can be
+# separated with a `/`.
+license = "..."
+
+# If a project is using a nonstandard license, then this key may be specified in
+# lieu of the above key and must point to a file relative to this manifest
+# (similar to the readme key).
+license-file = "..."
+
+# Optional specification of badges to be displayed on crates.io.
+#
+# - The badges pertaining to build status that are currently available are
+#   Appveyor, CircleCI, GitLab, and TravisCI.
+# - Available badges pertaining to code test coverage are Codecov and
+#   Coveralls.
+# - There are also maintenance-related badges based on isitmaintained.com
+#   which state the issue resolution time, percent of open issues, and future
+#   maintenance intentions.
+#
+# Where a `repository` key is required, it refers to a repository in
+# `user/repo` format.
+[badges]
+
+# Appveyor: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default), `bitbucket`, and
+# `gitlab`.
+appveyor = { repository = "...", branch = "master", service = "github" }
+
+# Circle CI: `repository` is required. `branch` is optional; default is `master`
+circle-ci = { repository = "...", branch = "master" }
+
+# GitLab: `repository` is required. `branch` is optional; default is `master`
+gitlab = { repository = "...", branch = "master" }
+
+# Travis CI: `repository` in format "<user>/<repo>" is required.
+# `branch` is optional; default is `master`
+travis-ci = { repository = "...", branch = "master" }
+
+# Codecov: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default), `bitbucket`, and
+# `gitlab`.
+codecov = { repository = "...", branch = "master", service = "github" }
+
+# Coveralls: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default) and `bitbucket`.
+coveralls = { repository = "...", branch = "master", service = "github" }
+
+# Is it maintained resolution time: `repository` is required.
+is-it-maintained-issue-resolution = { repository = "..." }
+
+# Is it maintained percentage of open issues: `repository` is required.
+is-it-maintained-open-issues = { repository = "..." }
+
+# Maintenance: `status` is required. Available options are `actively-developed`,
+# `passively-maintained`, `as-is`, `none`, `experimental`, `looking-for-maintainer`
+# and `deprecated`.
+maintenance = { status = "..." }
+```
+
+The [crates.io](https://crates.io) registry will render the description, display
+the license, link to the three URLs, and categorize by the keywords. These keys
+provide useful information to users of the registry and also influence the
+search ranking of a crate. Omitting all of them in a published crate is highly
+discouraged.
+
+#### The `metadata` table (optional)
+
+Cargo by default will warn about unused keys in `Cargo.toml` to assist in
+detecting typos and such. The `package.metadata` table, however, is completely
+ignored by Cargo and will not be warned about. This section can be used for
+tools which would like to store project configuration in `Cargo.toml`. For
+example:
+
+```toml
+[package]
+name = "..."
+# ...
+
+# Metadata used when generating an Android APK, for example.
+[package.metadata.android]
+package-name = "my-awesome-android-app"
+assets = "path/to/static"
+```
+
+### Dependency sections
+
+See the [specifying dependencies page](reference/specifying-dependencies.html) for
+information on the `[dependencies]`, `[dev-dependencies]`,
+`[build-dependencies]`, and target-specific `[target.*.dependencies]` sections.
+
+### The `[profile.*]` sections
+
+Cargo supports custom configuration of how rustc is invoked through profiles at
+the top level. Any manifest may declare a profile, but only the top-level
+project’s profiles are actually read. All dependencies’ profiles will be
+overridden. This is done so the top-level project has control over how its
+dependencies are compiled.
+
+There are currently five supported profile names, all of which have the same
+configuration available to them. Listed below is the configuration available,
+along with the defaults for each profile.
+
+```toml
+# The development profile, used for `cargo build`.
+[profile.dev]
+opt-level = 0      # controls the `--opt-level` the compiler builds with.
+                   # 0-1 is good for debugging. 2 is well-optimized. Max is 3.
+debug = true       # include debug information (debug symbols). Equivalent to
+                   # `-C debuginfo=2` compiler flag.
+rpath = false      # controls whether compiler should set loader paths.
+                   # If true, passes `-C rpath` flag to the compiler.
+lto = false        # Link Time Optimization usually reduces size of binaries
+                   # and static libraries. Increases compilation time.
+                   # If true, passes `-C lto` flag to the compiler.
+debug-assertions = true # controls whether debug assertions are enabled
+                   # (e.g. debug_assert!() and arithmetic overflow checks)
+codegen-units = 1  # if > 1 enables parallel code generation which improves
+                   # compile times, but prevents some optimizations.
+                   # Passes `-C codegen-units`. Ignored when `lto = true`.
+panic = 'unwind'   # panic strategy (`-C panic=...`), can also be 'abort'
+
+# The release profile, used for `cargo build --release`.
+[profile.release]
+opt-level = 3
+debug = false
+rpath = false
+lto = false
+debug-assertions = false
+codegen-units = 1
+panic = 'unwind'
+
+# The testing profile, used for `cargo test`.
+[profile.test]
+opt-level = 0
+debug = 2
+rpath = false
+lto = false
+debug-assertions = true
+codegen-units = 1
+panic = 'unwind'
+
+# The benchmarking profile, used for `cargo bench`.
+[profile.bench]
+opt-level = 3
+debug = false
+rpath = false
+lto = false
+debug-assertions = false
+codegen-units = 1
+panic = 'unwind'
+
+# The documentation profile, used for `cargo doc`.
+[profile.doc]
+opt-level = 0
+debug = 2
+rpath = false
+lto = false
+debug-assertions = true
+codegen-units = 1
+panic = 'unwind'
+```
+
+### The `[features]` section
+
+Cargo supports features to allow expression of:
+
+* conditional compilation options (usable through `cfg` attributes);
+* optional dependencies, which enhance a package, but are not required; and
+* clusters of optional dependencies, such as `postgres`, that would include the
+  `postgres` package, the `postgres-macros` package, and possibly other packages
+  (such as development-time mocking libraries, debugging tools, etc.).
+
+A feature of a package is either an optional dependency, or a set of other
+features. The format for specifying features is:
+
+```toml
+[package]
+name = "awesome"
+
+[features]
+# The default set of optional packages. Most people will want to use these
+# packages, but they are strictly optional. Note that `session` is not a package
+# but rather another feature listed in this manifest.
+default = ["jquery", "uglifier", "session"]
+
+# A feature with no dependencies is used mainly for conditional compilation,
+# like `#[cfg(feature = "go-faster")]`.
+go-faster = []
+
+# The `secure-password` feature depends on the bcrypt package. This aliasing
+# will allow people to talk about the feature in a higher-level way and allow
+# this package to add more requirements to the feature in the future.
+secure-password = ["bcrypt"]
+
+# Features can be used to reexport features of other packages. The `session`
+# feature of package `awesome` will ensure that the `session` feature of the
+# package `cookie` is also enabled.
+session = ["cookie/session"]
+
+[dependencies]
+# These packages are mandatory and form the core of this package’s distribution.
+cookie = "1.2.0"
+oauth = "1.1.0"
+route-recognizer = "=2.1.0"
+
+# A list of all of the optional dependencies, some of which are included in the
+# above `features`. They can be opted into by apps.
+jquery = { version = "1.0.2", optional = true }
+uglifier = { version = "1.5.3", optional = true }
+bcrypt = { version = "*", optional = true }
+civet = { version = "*", optional = true }
+```
+
+To use the package `awesome`:
+
+```toml
+[dependencies.awesome]
+version = "1.3.5"
+default-features = false # do not include the default features, and optionally
+                         # cherry-pick individual features
+features = ["secure-password", "civet"]
+```
+
+#### Rules
+
+The usage of features is subject to a few rules:
+
+* Feature names must not conflict with other package names in the manifest. This
+  is because they are opted into via `features = [...]`, which only has a single
+  namespace.
+* With the exception of the `default` feature, all features are opt-in. To opt
+  out of the default feature, use `default-features = false` and cherry-pick
+  individual features.
+* Feature groups are not allowed to cyclically depend on one another.
+* Dev-dependencies cannot be optional.
+* Feature groups can only reference optional dependencies.
+* When a feature is selected, Cargo will call `rustc` with `--cfg
+  feature="${feature_name}"`.
If a feature group is included, it and all of its
+  individual features will be included. This can be tested in code via
+  `#[cfg(feature = "foo")]`.
+
+Note that it is explicitly allowed for features to not actually activate any
+optional dependencies. This allows packages to internally enable/disable
+features without requiring a new dependency.
+
+#### Usage in end products
+
+One major use case for this feature is specifying optional features in end
+products. For example, the Servo project may want to include optional features
+that people can enable or disable when they build it.
+
+In that case, Servo will describe features in its `Cargo.toml` and they can be
+enabled using command-line flags:
+
+```shell
+$ cargo build --release --features "shumway pdf"
+```
+
+Default features can be excluded using `--no-default-features`.
+
+#### Usage in packages
+
+In most cases, the concept of *optional dependency* in a library is best
+expressed as a separate package that the top-level application depends on.
+
+However, high-level packages, like Iron or Piston, may want the ability to
+curate a number of packages for easy installation. The current Cargo system
+allows them to curate a number of mandatory dependencies into a single package
+for easy installation.
+
+In some cases, packages may want to provide additional curation for optional
+dependencies:
+
+* grouping a number of low-level optional dependencies together into a single
+  high-level feature;
+* specifying packages that are recommended (or suggested) to be included by
+  users of the package; and
+* including a feature (like `secure-password` in the motivating example) that
+  will only work if an optional dependency is available, and would be difficult
+  to implement as a separate package (for example, it may be overly difficult to
+  design an IO package to be completely decoupled from OpenSSL, with opt-in via
+  the inclusion of a separate package).
+
+In almost all cases, it is an antipattern to use these features outside of
+high-level packages that are designed for curation. If a feature is optional, it
+can almost certainly be expressed as a separate package.
+
+### The `[workspace]` section
+
+Projects can define a workspace, which is a set of crates that will all share
+the same `Cargo.lock` and output directory. The `[workspace]` table can be
+defined as:
+
+```toml
+[workspace]
+
+# Optional key, inferred if not present
+members = ["path/to/member1", "path/to/member2", "path/to/member3/*"]
+
+# Optional key, empty if not present
+exclude = ["path1", "path/to/dir2"]
+```
+
+Workspaces were added to Cargo as part of [RFC 1525] and have a number of
+properties:
+
+* A workspace can contain multiple crates where one of them is the *root crate*.
+* The *root crate*'s `Cargo.toml` contains the `[workspace]` table, but is not
+  required to have other configuration.
+* Whenever any crate in the workspace is compiled, output is placed in the
+  *workspace root*, i.e. next to the *root crate*'s `Cargo.toml`.
+* The lock file for all crates in the workspace resides in the *workspace root*.
+* The `[patch]` and `[replace]` sections in `Cargo.toml` are only recognized
+  in the *root crate*'s manifest, and ignored in member crates' manifests.
+
+[RFC 1525]: https://github.com/rust-lang/rfcs/blob/master/text/1525-cargo-workspace.md
+
+The *root crate* of a workspace, indicated by the presence of `[workspace]` in
+its manifest, is responsible for defining the entire workspace.
All `path`
+dependencies residing in the workspace directory become members. You can add
+additional packages to the workspace by listing them in the `members` key. Note
+that explicitly listed members will also have their own path dependencies
+included in the workspace. Sometimes a project may have a lot of workspace
+members, and keeping the list up to date can be onerous; to help with this, a
+path dependency can also use [globs][globs] to match multiple paths. Finally,
+the `exclude` key can be used to blacklist paths from being included in a
+workspace. This can be useful if some path dependencies should not be in the
+workspace at all.
+
+The `package.workspace` manifest key (described above) is used in member crates
+to point at a workspace's root crate. If this key is omitted then it is inferred
+to be the first crate whose manifest contains `[workspace]` upwards in the
+filesystem.
+
+A crate may either specify `package.workspace` or specify `[workspace]`. That
+is, a crate cannot both be a root crate in a workspace (contain `[workspace]`)
+and also be a member crate of another workspace (contain `package.workspace`).
+
+Most of the time workspaces will not need to be dealt with as `cargo new` and
+`cargo init` will handle workspace configuration automatically.
+
+#### Virtual Manifest
+
+In workspace manifests, if the `package` table is present, the workspace root
+crate will be treated as a normal package, as well as the root of the
+workspace. If the `package` table is not present in a workspace manifest, it is
+called a *virtual manifest*.
+
+When working with a *virtual manifest*, package-related cargo commands, like
+`cargo build`, are not available. However, most such commands support the
+`--all` option, which executes the command for every non-virtual manifest in
+the workspace.
+
+#TODO: move this to a more appropriate place
+### The project layout
+
+If your project is an executable, name the main source file `src/main.rs`. If it
+is a library, name the main source file `src/lib.rs`.
+
+Cargo will also treat any files located in `src/bin/*.rs` as executables. If
+your executable consists of more than just one source file, you might also use
+a directory inside `src/bin` containing a `main.rs` file, which will be treated
+as an executable named after its parent directory.
+Do note, however, that once you add a `[[bin]]` section ([see
+below](#configuring-a-target)), Cargo will no longer automatically build files
+located in `src/bin/*.rs`. Instead you must create a `[[bin]]` section for
+each file you want to build, as in the sketch at the end of this section.
+
+Your project can optionally contain folders named `examples`, `tests`, and
+`benches`, which Cargo will treat as containing examples,
+integration tests, and benchmarks respectively.
+
+```shell
+▾ src/           # directory containing source files
+  lib.rs         # the main entry point for libraries and packages
+  main.rs        # the main entry point for projects producing executables
+  ▾ bin/         # (optional) directory containing additional executables
+    *.rs
+  ▾ */           # (optional) directories containing multi-file executables
+    main.rs
+▾ examples/      # (optional) examples
+  *.rs
+▾ tests/         # (optional) integration tests
+  *.rs
+▾ benches/       # (optional) benchmarks
+  *.rs
+```
+
+To structure your code after you've created the files and folders for your
+project, you should remember to use Rust's module system, which you can read
+about in [the book](https://doc.rust-lang.org/book/crates-and-modules.html).
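+
+As a concrete illustration of the `[[bin]]` note above, here is a minimal
+sketch (with hypothetical binary names) of a manifest that explicitly declares
+two executables once automatic discovery of `src/bin/*.rs` no longer applies:
+
+```toml
+[package]
+name = "my-project"
+version = "0.1.0"
+
+# With `[[bin]]` sections present, only the binaries listed here are built;
+# other files under `src/bin/` are not picked up automatically.
+[[bin]]
+name = "server"
+path = "src/bin/server.rs"
+
+[[bin]]
+name = "client"
+path = "src/bin/client/main.rs"
+```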
+
+### Examples
+
+Files located under `examples` are example uses of the functionality provided by
+the library. When compiled, they are placed in the `target/examples` directory.
+
+They can compile either as executables (with a `main()` function) or libraries
+and pull in the library by using `extern crate <library-name>`. They are
+compiled when you run your tests to protect them from bitrotting.
+
+You can run individual executable examples with the command `cargo run
+--example <example-name>`.
+
+Specify `crate-type` to make an example be compiled as a library:
+
+```toml
+[[example]]
+name = "foo"
+crate-type = ["staticlib"]
+```
+
+You can build individual library examples with the command `cargo build
+--example <example-name>`.
+
+### Tests
+
+When you run `cargo test`, Cargo will:
+
+* compile and run your library’s unit tests, which are in the files reachable
+  from `lib.rs` (naturally, any sections marked with `#[cfg(test)]` will be
+  considered at this stage);
+* compile and run your library’s documentation tests, which are embedded inside
+  of documentation blocks;
+* compile and run your library’s [integration tests](#integration-tests); and
+* compile your library’s examples.
+
+#### Integration tests
+
+Each file in `tests/*.rs` is an integration test. When you run `cargo test`,
+Cargo will compile each of these files as a separate crate. The crate can link
+to your library by using `extern crate <library-name>`, like any other code that
+depends on it.
+
+Cargo will not automatically compile files inside subdirectories of `tests`, but
+an integration test can import modules from these directories as usual. For
+example, if you want several integration tests to share some code, you can put
+the shared code in `tests/common/mod.rs` and then put `mod common;` in each of
+the test files.
+
+### Configuring a target
+
+All of the `[[bin]]`, `[lib]`, `[[bench]]`, `[[test]]`, and `[[example]]`
+sections support similar configuration for specifying how a target should be
+built. The double-bracket sections like `[[bin]]` are TOML [arrays of
+tables](https://github.com/toml-lang/toml#array-of-tables), which means you can
+write more than one `[[bin]]` section to make several executables in your crate.
+
+The example below uses `[lib]`, but it applies to all the other sections as
+well. All values listed are the defaults for that option unless otherwise
+specified.
+
+```toml
+[package]
+# ...
+
+[lib]
+# The name of a target is the name of the library that will be generated. It
+# defaults to the name of the package or project, with any dashes replaced by
+# underscores. (Rust `extern crate` declarations reference this name;
+# therefore the value must be a valid Rust identifier to be usable.)
+name = "foo"
+
+# This field points at where the crate is located, relative to the `Cargo.toml`.
+path = "src/lib.rs"
+
+# A flag for enabling unit tests for this target. This is used by `cargo test`.
+test = true
+
+# A flag for enabling documentation tests for this target. This is only relevant
+# for libraries; it has no effect on other sections. This is used by
+# `cargo test`.
+doctest = true
+
+# A flag for enabling benchmarks for this target. This is used by `cargo bench`.
+bench = true
+
+# A flag for enabling documentation of this target. This is used by `cargo doc`.
+doc = true
+
+# If the target is meant to be a compiler plugin, this field must be set to true
+# for Cargo to correctly compile it and make it available for all dependencies.
+plugin = false
+
+# If the target is meant to be a "macros 1.1" procedural macro, this field must
+# be set to true.
+proc-macro = false
+
+# If set to false, `cargo test` will omit the `--test` flag to rustc, which
+# stops it from generating a test harness. This is useful when the binary being
+# built manages the test runner itself.
+harness = true
+```
+
+#### The `required-features` field (optional)
+
+The `required-features` field specifies which features the target needs in order
+to be built. If any of the required features are not selected, the target will
+be skipped. This is only relevant for the `[[bin]]`, `[[bench]]`, `[[test]]`,
+and `[[example]]` sections; it has no effect on `[lib]`.
+
+```toml
+[features]
+# ...
+postgres = []
+sqlite = []
+tools = []
+
+[[bin]]
+# ...
+required-features = ["postgres", "tools"]
+```
+
+#### Building dynamic or static libraries
+
+If your project produces a library, you can specify which kind of library to
+build by explicitly listing the library in your `Cargo.toml`:
+
+```toml
+# ...
+
+[lib]
+name = "..."
+crate-type = ["dylib"] # could be `staticlib` as well
+```
+
+The available options are `dylib`, `rlib`, `staticlib`, `cdylib`, and
+`proc-macro`. You should only use this option in a top-level project. Cargo will
+always compile packages (dependencies) based on the requirements of the project
+that includes them.
+
+You can read more about the different crate types in the
+[Rust Reference Manual](https://doc.rust-lang.org/reference/linkage.html).
+
+### The `[patch]` Section
+
+This section of Cargo.toml can be used to [override dependencies][replace] with
+other copies. The syntax is similar to the `[dependencies]` section:
+
+```toml
+[patch.crates-io]
+foo = { git = 'https://github.com/example/foo' }
+bar = { path = 'my/local/bar' }
+```
+
+The `[patch]` table is made of dependency-like sub-tables. Each key after
+`[patch]` is a URL of the source that's being patched, or `crates-io` if
+you're modifying the https://crates.io registry. In the example above,
+`crates-io` could be replaced with a git URL such as
+`https://github.com/rust-lang-nursery/log`.
+
+Each entry in these tables is a normal dependency specification, the same as
+found in the `[dependencies]` section of the manifest. The dependencies listed
+in the `[patch]` section are resolved and used to patch the source at the
+URL specified. The above manifest snippet patches the `crates-io` source (e.g.
+crates.io itself) with the `foo` and `bar` crates.
+
+Sources can be patched with versions of crates that do not exist, and they can
+also be patched with versions of crates that already exist. If a source is
+patched with a crate version that already exists in the source, then the
+source's original crate is replaced.
+
+More information about overriding dependencies can be found in the [overriding
+dependencies][replace] section of the documentation and [RFC 1969] for the
+technical specification of this feature. (Note that the `[patch]` feature will
+first become available in Rust 1.21, set to be released on 2017-10-12.)
+
+[RFC 1969]: https://github.com/rust-lang/rfcs/pull/1969
+[replace]: reference/specifying-dependencies.html#overriding-dependencies
+
+### The `[replace]` Section
+
+This section of Cargo.toml can be used to [override dependencies][replace] with
+other copies.
The syntax is similar to the `[dependencies]` section:
+
+```toml
+[replace]
+"foo:0.1.0" = { git = 'https://github.com/example/foo' }
+"bar:1.0.2" = { path = 'my/local/bar' }
+```
+
+Each key in the `[replace]` table is a [package ID
+specification](reference/pkgid-spec.html) which allows arbitrarily choosing a
+node in the dependency graph to override. The value of each key is the same as
+the `[dependencies]` syntax for specifying dependencies, except that you can't
+specify features. Note that when a crate is overridden, the copy it's overridden
+with must have both the same name and version, but it can come from a different
+source (e.g. git or a local path).
+
+More information about overriding dependencies can be found in the [overriding
+dependencies][replace] section of the documentation.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/pkgid-spec.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/pkgid-spec.md
new file mode 100644
index 000000000..bd7ac2d92
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/pkgid-spec.md
@@ -0,0 +1,44 @@
+## Package ID Specifications
+
+Subcommands of Cargo frequently need to refer to a particular package within a
+dependency graph for various operations like updating, cleaning, building, etc.
+To solve this problem, Cargo supports Package ID Specifications. A specification
+is a string which is used to uniquely refer to one package within a graph of
+packages.
+
+#### Specification grammar
+
+The formal grammar for a Package ID Specification is:
+
+```notrust
+pkgid := pkgname
+       | [ proto "://" ] hostname-and-path [ "#" ( pkgname | semver ) ]
+pkgname := name [ ":" semver ]
+
+proto := "http" | "git" | ...
+```
+
+Here, brackets indicate that the contents are optional.
+
+#### Example specifications
+
+These could all be references to a package `foo` version `1.2.3` from the
+registry at `crates.io`:
+
+| pkgid                        | name  | version | url                    |
+|:-----------------------------|:-----:|:-------:|:----------------------:|
+| `foo`                        | `foo` | `*`     | `*`                    |
+| `foo:1.2.3`                  | `foo` | `1.2.3` | `*`                    |
+| `crates.io/foo`              | `foo` | `*`     | `*://crates.io/foo`    |
+| `crates.io/foo#1.2.3`        | `foo` | `1.2.3` | `*://crates.io/foo`    |
+| `crates.io/bar#foo:1.2.3`    | `foo` | `1.2.3` | `*://crates.io/bar`    |
+| `http://crates.io/foo#1.2.3` | `foo` | `1.2.3` | `http://crates.io/foo` |
+
+#### Brevity of specifications
+
+The goal of this is to enable both succinct and exhaustive syntaxes for
+referring to packages in a dependency graph. Ambiguous references may refer to
+one or more packages. Most commands generate an error if more than one package
+could be referred to with the same specification.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/publishing.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/publishing.md
new file mode 100644
index 000000000..76a36a103
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/publishing.md
@@ -0,0 +1,222 @@
+## Publishing on crates.io
+
+Once you've got a library that you'd like to share with the world, it's time to
+publish it on [crates.io]! Publishing a crate is when a specific
+version is uploaded to be hosted on [crates.io].
+
+Take care when publishing a crate, because a publish is **permanent**. The
+version can never be overwritten, and the code cannot be deleted. There is no
+limit to the number of versions which can be published, however.
+
+### Before your first publish
+
+First things first: you’ll need an account on [crates.io] to acquire
+an API token. To do so, [visit the home page][crates.io] and log in via a GitHub
+account (required for now). After this, visit your [Account
+Settings](https://crates.io/me) page and run the `cargo login` command
+specified.
+
+```shell
+$ cargo login abcdefghijklmnopqrstuvwxyz012345
+```
+
+This command will inform Cargo of your API token and store it locally in your
+`~/.cargo/credentials` (previously it was `~/.cargo/config`). Note that this
+token is a **secret** and should not be shared with anyone else. If it leaks for
+any reason, you should regenerate it immediately.
+
+### Before publishing a new crate
+
+Keep in mind that crate names on [crates.io] are allocated on a first-come,
+first-served basis. Once a crate name is taken, it cannot be used for another
+crate.
+
+#### Packaging a crate
+
+The next step is to package up your crate into a format that can be uploaded to
+[crates.io]. For this we’ll use the `cargo package` subcommand. This will take
+our entire crate and package it all up into a `*.crate` file in the
+`target/package` directory.
+
+```shell
+$ cargo package
+```
+
+As an added bonus, the `*.crate` will be verified independently of the current
+source tree. After the `*.crate` is created, it’s unpacked into
+`target/package` and then built from scratch to ensure that all necessary files
+are there for the build to succeed. This behavior can be disabled with the
+`--no-verify` flag.
+
+Now’s a good time to take a look at the `*.crate` file to make sure you didn’t
+accidentally package up that 2GB video asset, or large data files used for code
+generation, integration tests, or benchmarking. There is currently a 10MB
+upload size limit on `*.crate` files. So, if the `tests` and `benches`
+directories and the data they depend on only add up to a couple of megabytes,
+you can keep them in your package; otherwise, it is better to exclude them.
+
+Cargo will automatically ignore files ignored by your version control system
+when packaging, but if you want to specify an extra set of files to ignore you
+can use the `exclude` key in the manifest:
+
+```toml
+[package]
+# ...
+exclude = [
+    "public/assets/*",
+    "videos/*",
+]
+```
+
+The syntax of each element in this array is what
+[rust-lang/glob](https://github.com/rust-lang/glob) accepts. If you’d rather
+roll with a whitelist instead of a blacklist, Cargo also supports an `include`
+key, which if set, overrides the `exclude` key:
+
+```toml
+[package]
+# ...
+include = [
+    "**/*.rs",
+    "Cargo.toml",
+]
+```
+
+### Uploading the crate
+
+Now that we’ve got a `*.crate` file ready to go, it can be uploaded to
+[crates.io] with the `cargo publish` command. And that’s it, you’ve now published
+your first crate!
+
+```shell
+$ cargo publish
+```
+
+If you’d like to skip the `cargo package` step, the `cargo publish` subcommand
+will automatically package up the local crate if a copy isn’t found already.
+
+Be sure to check out the [metadata you can
+specify](reference/manifest.html#package-metadata) to ensure your crate can be
+discovered more easily!
+
+### Publishing a new version of an existing crate
+
+In order to release a new version, change the `version` value specified in your
+`Cargo.toml` manifest. Keep in mind [the semver
+rules](reference/manifest.html#the-version-field).
Then, optionally, run
+`cargo package` if you want to inspect the `*.crate` file for the new version
+before publishing, and run `cargo publish` to upload the new version.
+
+### Managing a crates.io-based crate
+
+Management of crates is primarily done through the command-line `cargo` tool
+rather than the [crates.io] web interface. For this, there are a few subcommands
+to manage a crate.
+
+#### `cargo yank`
+
+Occasions may arise where you publish a version of a crate that actually ends up
+being broken for one reason or another (syntax error, forgot to include a file,
+etc.). For situations such as this, Cargo supports a “yank” of a version of a
+crate.
+
+```shell
+$ cargo yank --vers 1.0.1
+$ cargo yank --vers 1.0.1 --undo
+```
+
+A yank **does not** delete any code. This feature is not intended for deleting
+accidentally uploaded secrets, for example. If that happens, you must reset
+those secrets immediately.
+
+The semantics of a yanked version are that no new dependencies can be created
+against that version, but all existing dependencies continue to work. One of the
+major goals of [crates.io] is to act as a permanent archive of crates that does
+not change over time, and allowing deletion of a version would go against this
+goal. Essentially a yank means that all projects with a `Cargo.lock` will not
+break, while any future `Cargo.lock` files generated will not list the yanked
+version.
+
+#### `cargo owner`
+
+A crate is often developed by more than one person, or the primary maintainer
+may change over time! The owner of a crate is the only person allowed to publish
+new versions of the crate, but an owner may designate additional owners.
+
+```shell
+$ cargo owner --add my-buddy
+$ cargo owner --remove my-buddy
+$ cargo owner --add github:rust-lang:owners
+$ cargo owner --remove github:rust-lang:owners
+```
+
+The owner IDs given to these commands must be GitHub user names or GitHub teams.
+
+If a user name is given to `--add`, that user becomes a “named” owner, with
+full rights to the crate. In addition to being able to publish or yank versions
+of the crate, they have the ability to add or remove owners, *including* the
+owner that made *them* an owner. Needless to say, you shouldn’t make people you
+don’t fully trust into a named owner. In order to become a named owner, a user
+must have logged into [crates.io] previously.
+
+If a team name is given to `--add`, that team becomes a “team” owner, with
+restricted rights to the crate. While they have permission to publish or yank
+versions of the crate, they *do not* have the ability to add or remove owners.
+In addition to being more convenient for managing groups of owners, teams are
+just a bit more secure against owners becoming malicious.
+
+The syntax for teams is currently `github:org:team` (see examples above).
+In order to add a team as an owner, one must be a member of that team. No
+such restriction applies to removing a team as an owner.
+
+### GitHub permissions
+
+Team membership is not something GitHub provides simple public access to, and
+you are likely to encounter the following message when working with teams:
+
+> It looks like you don’t have permission to query a necessary property from
+GitHub to complete this request. You may need to re-authenticate on [crates.io]
+to grant permission to read GitHub org memberships. Just go to
+https://crates.io/login
+
+This is basically a catch-all for “you tried to query a team, and one of the
+five levels of membership access control denied this”.
That is not an
+exaggeration. GitHub’s support for team access control is Enterprise Grade.
+
+The most likely cause of this is simply that you last logged in before this
+feature was added. We originally requested *no* permissions from GitHub when
+authenticating users, because we didn’t actually ever use the user’s token for
+anything other than logging them in. However, to query team membership on your
+behalf, we now require
+[the `read:org` scope](https://developer.github.com/v3/oauth/#scopes).
+
+You are free to deny us this scope, and everything that worked before teams
+were introduced will keep working. However, you will never be able to add a team
+as an owner, or publish a crate as a team owner. If you ever attempt to do this,
+you will get the error above. You may also see this error if you ever try to
+publish a crate that you don’t own at all but that happens to have a team owner.
+
+If you ever change your mind, or just aren’t sure if [crates.io] has sufficient
+permission, you can always go to https://crates.io/login, which will prompt you
+for permission if [crates.io] doesn’t have all the scopes it would like.
+
+An additional barrier to querying GitHub is that the organization may be
+actively denying third-party access. To check this, you can go to:
+
+    https://github.com/organizations/:org/settings/oauth_application_policy
+
+where `:org` is the name of the organization (e.g. rust-lang). You may see
+something like:
+
+![Organization Access Control](images/org-level-acl.png)
+
+Here you may choose to explicitly remove [crates.io] from your organization’s
+blacklist, or simply press the “Remove Restrictions” button to allow all third
+party applications to access this data.
+
+Alternatively, when [crates.io] requested the `read:org` scope, you could have
+explicitly whitelisted [crates.io] querying the org in question by pressing
+the “Grant Access” button next to its name:
+
+![Authentication Access Control](images/auth-level-acl.png)
+
+[crates.io]: https://crates.io/
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/source-replacement.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/source-replacement.md
new file mode 100644
index 000000000..343a7296a
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/source-replacement.md
@@ -0,0 +1,128 @@
+## Source Replacement
+
+Cargo supports the ability to **replace one source with another** to express
+strategies along the lines of mirrors or vendoring dependencies. Configuration
+is currently done through the [`.cargo/config` configuration][config] mechanism,
+like so:
+
+[config]: reference/config.html
+
+```toml
+# The `source` table is where all keys related to source-replacement
+# are stored.
+[source]
+
+# Under the `source` table are a number of other tables whose keys are a
+# name for the relevant source. For example this section defines a new
+# source, called `my-awesome-source`, which comes from a directory
+# located at `vendor` relative to the directory containing this
+# `.cargo/config` file.
+[source.my-awesome-source]
+directory = "vendor"
+
+# The crates.io default source for crates is available under the name
+# "crates-io", and here we use the `replace-with` key to indicate that it's
+# replaced with our source above.
+[source.crates-io]
+replace-with = "my-awesome-source"
+```
+
+With this configuration, Cargo attempts to look up all crates in the directory
+"vendor" rather than querying the online registry at crates.io. Using source
+replacement, Cargo can express:
+
+* Vendoring - custom sources can be defined which represent crates on the local
+  filesystem. These sources are subsets of the source that they're replacing and
+  can be checked into projects if necessary.
+
+* Mirroring - sources can be replaced with an equivalent version which acts as a
+  cache for crates.io itself.
+
+Cargo makes a core assumption about source replacement: the source code is
+exactly the same from both sources. In the example above, Cargo assumes that all
+of the crates coming from `my-awesome-source` are exactly the same as the copies
+from `crates-io`. Note that this also means that `my-awesome-source` is not
+allowed to have crates which are not present in the `crates-io` source.
+
+As a consequence, source replacement is not appropriate for situations such as
+patching a dependency or a private registry. Cargo supports patching
+dependencies through the usage of [the `[replace]` key][replace-section], and
+private registry support is planned for a future version of Cargo.
+
+[replace-section]: reference/manifest.html#the-replace-section
+
+### Configuration
+
+Configuration of replacement sources is done through [`.cargo/config`][config],
+and the full set of available keys is:
+
+```toml
+# Each source has its own table where the key is the name of the source
+[source.the-source-name]
+
+# Indicate that `the-source-name` will be replaced with `another-source`,
+# defined elsewhere
+replace-with = "another-source"
+
+# Available kinds of sources that can be specified (described below)
+registry = "https://example.com/path/to/index"
+local-registry = "path/to/registry"
+directory = "path/to/vendor"
+```
+
+The `crates-io` source represents the crates.io online registry (the default
+source of crates) and can be replaced with:
+
+```toml
+[source.crates-io]
+replace-with = 'another-source'
+```
+
+### Registry Sources
+
+A "registry source" is one that is the same as crates.io itself. That is, it has
+an index served in a git repository which matches the format of the
+[crates.io index](https://github.com/rust-lang/crates.io-index). That repository
+then has configuration indicating where to download crates from.
+
+Currently there is not an already-available project for setting up a mirror of
+crates.io. Stay tuned though!
+
+### Local Registry Sources
+
+A "local registry source" is intended to be a subset of another registry
+source, but available on the local filesystem (aka vendoring). Local registries
+are downloaded ahead of time, typically sync'd with a `Cargo.lock`, and are
+made up of a set of `*.crate` files and an index like the normal registry is.
+
+The primary way to manage and create local registry sources is through the
+[`cargo-local-registry`][cargo-local-registry] subcommand, which is available on
+crates.io and can be installed with `cargo install cargo-local-registry`.
+
+[cargo-local-registry]: https://crates.io/crates/cargo-local-registry
+
+Local registries are contained within one directory and contain a number of
+`*.crate` files downloaded from crates.io as well as an `index` directory with
+the same format as the crates.io-index project (populated with just entries for
+the crates that are present).
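+
+For example, here is a minimal sketch of a `.cargo/config` that replaces
+crates.io with a hypothetical local registry checked out into a `my-registry`
+directory, using only the keys described above:
+
+```toml
+[source.crates-io]
+replace-with = "my-local-registry"
+
+[source.my-local-registry]
+local-registry = "my-registry"
+```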
+
+### Directory Sources
+
+A "directory source" is similar to a local registry source where it contains a
+number of crates available on the local filesystem, suitable for vendoring
+dependencies. Also like local registries, directory sources can primarily be
+managed by an external subcommand, [`cargo-vendor`][cargo-vendor], which can be
+installed with `cargo install cargo-vendor`.
+
+[cargo-vendor]: https://crates.io/crates/cargo-vendor
+
+Directory sources are distinct from local registries, though, in that they
+contain the unpacked version of `*.crate` files, making it more suitable in some
+situations to check everything into source control. A directory source is just a
+directory containing a number of other directories which contain the source code
+for crates (the unpacked version of `*.crate` files). Currently no restriction
+is placed on the name of each directory.
+
+Each crate in a directory source also has an associated metadata file indicating
+the checksum of each file in the crate to protect against accidental
+modifications.
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/src/reference/specifying-dependencies.md b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/specifying-dependencies.md
new file mode 100644
index 000000000..b4f81fa8b
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/book/src/reference/specifying-dependencies.md
@@ -0,0 +1,524 @@
+## Specifying Dependencies
+
+Your crates can depend on other libraries from [crates.io], `git` repositories,
+or subdirectories on your local file system. You can also temporarily override
+the location of a dependency, for example, to test out a bug fix in a
+dependency that you are working on locally. You can have different dependencies
+for different platforms, and dependencies that are only used during
+development. Let's take a look at how to do each of these.
+
+### Specifying dependencies from crates.io
+
+Cargo is configured to look for dependencies on [crates.io] by default. Only
+the name and a version string are required in this case. In [the cargo
+guide](guide/index.html), we specified a dependency on the `time` crate:
+
+```toml
+[dependencies]
+time = "0.1.12"
+```
+
+The string `"0.1.12"` is a [semver] version requirement. Since this
+string does not have any operators in it, it is interpreted the same way as
+if we had specified `"^0.1.12"`, which is called a caret requirement.
+
+[semver]: https://github.com/steveklabnik/semver#requirements
+
+### Caret requirements
+
+**Caret requirements** allow SemVer-compatible updates to a specified version.
+An update is allowed if the new version number does not modify the left-most
+non-zero digit in the major, minor, patch grouping. In this case, if we ran
+`cargo update -p time`, cargo would update us to version `0.1.13` if it was
+available, but would not update us to `0.2.0`. If instead we had specified the
+version string as `^1.0`, cargo would update to `1.1` but not `2.0`. The version
+`0.0.x` is not considered compatible with any other version.
+
+Here are some more examples of caret requirements and the versions that would
+be allowed with them:
+
+```notrust
+^1.2.3 := >=1.2.3 <2.0.0
+^1.2 := >=1.2.0 <2.0.0
+^1 := >=1.0.0 <2.0.0
+^0.2.3 := >=0.2.3 <0.3.0
+^0.0.3 := >=0.0.3 <0.0.4
+^0.0 := >=0.0.0 <0.1.0
+^0 := >=0.0.0 <1.0.0
+```
+
+This compatibility convention is different from SemVer in the way it treats
+versions before 1.0.0.
While SemVer says there is no compatibility before +1.0.0, Cargo considers `0.x.y` to be compatible with `0.x.z`, where `y ≥ z` +and `x > 0`. + +### Tilde requirements + +**Tilde requirements** specify a minimal version with some ability to update. +If you specify a major, minor, and patch version or only a major and minor +version, only patch-level changes are allowed. If you only specify a major +version, then minor- and patch-level changes are allowed. + +`~1.2.3` is an example of a tilde requirement. + +```notrust +~1.2.3 := >=1.2.3 <1.3.0 +~1.2 := >=1.2.0 <1.3.0 +~1 := >=1.0.0 <2.0.0 +``` + +### Wildcard requirements + +**Wildcard requirements** allow for any version where the wildcard is +positioned. + +`*`, `1.*` and `1.2.*` are examples of wildcard requirements. + +```notrust +* := >=0.0.0 +1.* := >=1.0.0 <2.0.0 +1.2.* := >=1.2.0 <1.3.0 +``` + +### Inequality requirements + +**Inequality requirements** allow manually specifying a version range or an +exact version to depend on. + +Here are some examples of inequality requirements: + +```notrust +>= 1.2.0 +> 1 +< 2 += 1.2.3 +``` + +### Multiple requirements + +Multiple version requirements can also be separated with a comma, e.g. `>= 1.2, +< 1.5`. + +### Specifying dependencies from `git` repositories + +To depend on a library located in a `git` repository, the minimum information +you need to specify is the location of the repository with the `git` key: + +```toml +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand" } +``` + +Cargo will fetch the `git` repository at this location then look for a +`Cargo.toml` for the requested crate anywhere inside the `git` repository +(not necessarily at the root). + +Since we haven’t specified any other information, Cargo assumes that +we intend to use the latest commit on the `master` branch to build our project. +You can combine the `git` key with the `rev`, `tag`, or `branch` keys to +specify something else. Here's an example of specifying that you want to use +the latest commit on a branch named `next`: + +```toml +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand", branch = "next" } +``` + +### Specifying path dependencies + +Over time, our `hello_world` project from [the guide](guide/index.html) has +grown significantly in size! It’s gotten to the point that we probably want to +split out a separate crate for others to use. To do this Cargo supports **path +dependencies** which are typically sub-crates that live within one repository. +Let’s start off by making a new crate inside of our `hello_world` project: + +```shell +# inside of hello_world/ +$ cargo new hello_utils +``` + +This will create a new folder `hello_utils` inside of which a `Cargo.toml` and +`src` folder are ready to be configured. In order to tell Cargo about this, open +up `hello_world/Cargo.toml` and add `hello_utils` to your dependencies: + +```toml +[dependencies] +hello_utils = { path = "hello_utils" } +``` + +This tells Cargo that we depend on a crate called `hello_utils` which is found +in the `hello_utils` folder (relative to the `Cargo.toml` it’s written in). + +And that’s it! The next `cargo build` will automatically build `hello_utils` and +all of its own dependencies, and others can also start using the crate as well. +However, crates that use dependencies specified with only a path are not +permitted on [crates.io]. 
If we wanted to publish our `hello_world` crate, we
+would need to publish a version of `hello_utils` to [crates.io](https://crates.io)
+and specify its version in the dependencies line as well:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils", version = "0.1.0" }
+```
+
+### Overriding dependencies
+
+There are a number of methods in Cargo to support overriding dependencies and
+otherwise controlling the dependency graph. These options are, however,
+typically only available at the workspace level and aren't propagated through
+dependencies. In other words, "applications" have the ability to override
+dependencies but "libraries" do not.
+
+The desire to override a dependency or otherwise alter some dependencies can
+arise through a number of scenarios. Most of them, however, boil down to the
+ability to work with a crate before it's been published to crates.io. For
+example:
+
+* A crate you're working on is also used in a much larger application you're
+  working on, and you'd like to test a bug fix to the library inside of the
+  larger application.
+* An upstream crate you don't work on has a new feature or a bug fix on the
+  master branch of its git repository which you'd like to test out.
+* You're about to publish a new major version of your crate, but you'd like to
+  do integration testing across an entire project to ensure the new major
+  version works.
+* You've submitted a fix to an upstream crate for a bug you found, but you'd
+  like to immediately have your application start depending on the fixed version
+  of the crate to avoid blocking on the bug fix getting merged.
+
+These scenarios are currently all solved with the [`[patch]` manifest
+section][patch-section]. Note that the `[patch]` feature is not yet stable
+and will be released on 2017-08-31. Historically some of these scenarios
+have been solved with [the `[replace]` section][replace-section], but we'll
+document the `[patch]` section here.
+
+[patch-section]: reference/manifest.html#the-patch-section
+[replace-section]: reference/manifest.html#the-replace-section
+
+### Testing a bugfix
+
+Let's say you're working with the [`uuid`] crate but while you're working on it
+you discover a bug. You are, however, quite enterprising, so you decide to try
+to fix the bug yourself! Originally your manifest will look like:
+
+[`uuid`]: https://crates.io/crates/uuid
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0"
+```
+
+The first thing we'll do is clone the [`uuid` repository][uuid-repository]
+locally via:
+
+```shell
+$ git clone https://github.com/rust-lang-nursery/uuid
+```
+
+Next we'll edit the manifest of `my-library` to contain:
+
+```toml
+[patch.crates-io]
+uuid = { path = "../path/to/uuid" }
+```
+
+Here we declare that we're *patching* the source `crates-io` with a new
+dependency. This will effectively add the locally checked-out version of `uuid`
+to the crates.io registry for our local project.
+
+Next up we need to ensure that our lock file is updated to use this new version
+of `uuid` so our project uses the locally checked out copy instead of one from
+crates.io. The way `[patch]` works is that it'll load the dependency at
+`../path/to/uuid` and then whenever crates.io is queried for versions of `uuid`
+it'll *also* return the local version.
+
+This means that the version number of the local checkout is significant and will
+affect whether the patch is used.
Our manifest declared `uuid = "1.0"`, which
+means we'll only resolve to `>= 1.0.0, < 2.0.0`, and Cargo's greedy resolution
+algorithm also means that we'll resolve to the maximum version within that
+range. Typically this doesn't matter, as the version of the git repository will
+already be greater than, or match, the maximum version published on crates.io,
+but it's important to keep this in mind!
+
+In any case, typically all you need to do now is:
+
+```shell
+$ cargo build
+   Compiling uuid v1.0.0 (file://.../uuid)
+   Compiling my-library v0.1.0 (file://.../my-library)
+    Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs
+```
+
+And that's it! You're now building with the local version of `uuid` (note the
+`file://` in the build output). If you don't see the `file://` version getting
+built then you may need to run `cargo update -p uuid --precise $version` where
+`$version` is the version of the locally checked out copy of `uuid`.
+
+Once you've fixed the bug you originally found, the next thing you'll likely
+want to do is submit it as a pull request to the `uuid` crate itself. Once
+you've done this, you can also update the `[patch]` section. The listing
+inside of `[patch]` is just like the `[dependencies]` section, so once your pull
+request is merged you could change your `path` dependency to:
+
+```toml
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+[uuid-repository]: https://github.com/rust-lang-nursery/uuid
+
+### Working with an unpublished minor version
+
+Let's now shift gears a bit from bug fixes to adding features. While working on
+`my-library` you discover that a whole new feature is needed in the `uuid`
+crate. You've implemented this feature, tested it locally above with `[patch]`,
+and submitted a pull request. Let's go over how you continue to use and test it
+before it's actually published.
+
+Let's also say that the current version of `uuid` on crates.io is `1.0.0`, but
+since then the master branch of the git repository has updated to `1.0.1`. This
+branch includes your new feature you submitted previously. To use this
+repository we'll edit our `Cargo.toml` to look like:
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0.1"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Note that our local dependency on `uuid` has been updated to `1.0.1`, as it's
+what we'll actually require once the crate is published. This version doesn't
+exist on crates.io, though, so we provide it with the `[patch]` section of the
+manifest.
+
+Now when our library is built it'll fetch `uuid` from the git repository and
+resolve to 1.0.1 inside the repository instead of trying to download a version
+from crates.io. Once 1.0.1 is published on crates.io the `[patch]` section can
+be deleted.
+
+It's also worth noting that `[patch]` applies *transitively*. Let's say you use
+`my-library` in a larger project, such as:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Remember that `[patch]` is only applicable at the *top level*, so consumers of
+`my-library` have to repeat the `[patch]` section if necessary.
Here, though,
+the new `uuid` crate applies to *both* our dependency on `uuid` and the
+`my-library -> uuid` dependency. The `uuid` crate will be resolved to one
+version for this entire crate graph, 1.0.1, and it'll be pulled from the git
+repository.
+
+### Prepublishing a breaking change
+
+As a final scenario, let's take a look at working with a new major version of a
+crate, typically accompanied by breaking changes. Sticking with our previous
+crates, this means that we're going to be creating version 2.0.0 of the `uuid`
+crate. After we've submitted all changes upstream we can update our manifest for
+`my-library` to look like:
+
+```toml
+[dependencies]
+uuid = "2.0"
+
+[patch.crates-io]
+uuid = { git = "https://github.com/rust-lang-nursery/uuid", branch = "2.0.0" }
+```
+
+And that's it! As in the previous example, the 2.0.0 version doesn't actually
+exist on crates.io but we can still pull it in through a git dependency by
+using the `[patch]` section. As a thought exercise let's take another look at
+the `my-binary` manifest from above:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid', version = '2.0.0' }
+```
+
+Note that this will actually resolve to two versions of the `uuid` crate. The
+`my-binary` crate will continue to use the 1.x.y series of the `uuid` crate but
+the `my-library` crate will use the 2.0.0 version of `uuid`. This will allow you
+to gradually roll out breaking changes to a crate through a dependency graph
+without being forced to update everything all at once.
+
+### Overriding with local dependencies
+
+Sometimes you're only temporarily working on a crate and you don't want to have
+to modify `Cargo.toml` as with the `[patch]` section above. For this use
+case Cargo offers a much more limited version of overrides called **path
+overrides**.
+
+Path overrides are specified through `.cargo/config` instead of `Cargo.toml`,
+and you can find [more documentation about this configuration][config-docs].
+Inside of `.cargo/config` you'll specify a key called `paths`:
+
+[config-docs]: config.html
+
+```toml
+paths = ["/path/to/uuid"]
+```
+
+This array should be filled with directories that contain a `Cargo.toml`. In
+this instance, we’re just adding `uuid`, so it will be the only one that’s
+overridden. This path can be either absolute or relative to the directory that
+contains the `.cargo` folder.
+
+Path overrides are more restricted than the `[patch]` section, however, in
+that they cannot change the structure of the dependency graph. When a
+path replacement is used, the previous set of dependencies must all match the
+new `Cargo.toml` specification exactly. For example, this means that path
+overrides cannot be used to test out adding a dependency to a crate; `[patch]`
+must be used in that situation instead. As a result, usage of a path override is
+typically isolated to quick bug fixes rather than larger changes.
+
+Note: using a local configuration to override paths will only work for crates
+that have been published to [crates.io]. You cannot use this feature to tell
+Cargo how to find local unpublished crates.
+
+### Platform specific dependencies
+
+Platform-specific dependencies take the same format, but are listed under a
+`target` section.
Normally Rust-like `#[cfg]` syntax will be used to define +these sections: + +```toml +[target.'cfg(windows)'.dependencies] +winhttp = "0.4.0" + +[target.'cfg(unix)'.dependencies] +openssl = "1.0.1" + +[target.'cfg(target_arch = "x86")'.dependencies] +native = { path = "native/i686" } + +[target.'cfg(target_arch = "x86_64")'.dependencies] +native = { path = "native/x86_64" } +``` + +Like with Rust, the syntax here supports the `not`, `any`, and `all` operators +to combine various cfg name/value pairs. Note that the `cfg` syntax has only +been available since Cargo 0.9.0 (Rust 1.8.0). + +In addition to `#[cfg]` syntax, Cargo also supports listing out the full target +the dependencies would apply to: + +```toml +[target.x86_64-pc-windows-gnu.dependencies] +winhttp = "0.4.0" + +[target.i686-unknown-linux-gnu.dependencies] +openssl = "1.0.1" +``` + +If you’re using a custom target specification, quote the full path and file +name: + +```toml +[target."x86_64/windows.json".dependencies] +winhttp = "0.4.0" + +[target."i686/linux.json".dependencies] +openssl = "1.0.1" +native = { path = "native/i686" } + +[target."x86_64/linux.json".dependencies] +openssl = "1.0.1" +native = { path = "native/x86_64" } +``` + +### Development dependencies + +You can add a `[dev-dependencies]` section to your `Cargo.toml` whose format +is equivalent to `[dependencies]`: + +```toml +[dev-dependencies] +tempdir = "0.3" +``` + +Dev-dependencies are not used when compiling +a package for building, but are used for compiling tests, examples, and +benchmarks. + +These dependencies are *not* propagated to other packages which depend on this +package. + +You can also have target-specific development dependencies by using +`dev-dependencies` in the target section header instead of `dependencies`. For +example: + +```toml +[target.'cfg(unix)'.dev-dependencies] +mio = "0.0.1" +``` + +[crates.io]: https://crates.io/ + +### Build dependencies + +You can depend on other Cargo-based crates for use in your build scripts. +Dependencies are declared through the `build-dependencies` section of the +manifest: + +```toml +[build-dependencies] +gcc = "0.3" +``` + +The build script **does not** have access to the dependencies listed +in the `dependencies` or `dev-dependencies` section. Build +dependencies will likewise not be available to the package itself +unless listed under the `dependencies` section as well. A package +itself and its build script are built separately, so their +dependencies need not coincide. Cargo is kept simpler and cleaner by +using independent dependencies for independent purposes. + +### Choosing features + +If a package you depend on offers conditional features, you can +specify which to use: + +```toml +[dependencies.awesome] +version = "1.3.5" +default-features = false # do not include the default features, and optionally + # cherry-pick individual features +features = ["secure-password", "civet"] +``` + +More information about features can be found in the +[manifest documentation](reference/manifest.html#the-features-section). 
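+
+The same dependency can also be written as an inline table directly under
+`[dependencies]`; this is just a sketch of the equivalent syntax for the
+example above:
+
+```toml
+[dependencies]
+# Equivalent to the `[dependencies.awesome]` table shown above.
+awesome = { version = "1.3.5", default-features = false, features = ["secure-password", "civet"] }
+```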
diff --git a/collector/compile-benchmarks/cargo/src/doc/book/theme/favicon.png b/collector/compile-benchmarks/cargo/src/doc/book/theme/favicon.png
new file mode 100644
index 000000000..a91ad692c
Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/book/theme/favicon.png differ
diff --git a/collector/compile-benchmarks/cargo/src/doc/build-script.md b/collector/compile-benchmarks/cargo/src/doc/build-script.md
new file mode 100644
index 000000000..d05e6ba2c
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/build-script.md
@@ -0,0 +1,556 @@
% Build Script Support

Some packages need to compile third-party non-Rust code, for example C libraries. Other packages need to link to C libraries which can either be located on the system or possibly need to be built from source. Others still need facilities for functionality such as code generation before building (think parser generators).

Cargo does not aim to replace other tools that are well-optimized for these tasks, but it does integrate with them through the `build` configuration option.

```toml
[package]
# ...
build = "build.rs"
```

The Rust file designated by the `build` command (relative to the package root) will be compiled and invoked before anything else is compiled in the package, allowing your Rust code to depend on the built or generated artifacts. Note that if you do not specify a value for `build` but your package root contains a `"build.rs"` file, Cargo will compile and invoke this file for you.

Some example use cases of the build command are:

* Building a bundled C library.
* Finding a C library on the host system.
* Generating a Rust module from a specification.
* Performing any platform-specific configuration needed for the crate.

Each of these use cases will be detailed in full below to give examples of how the build command works.

## Inputs to the Build Script

When the build script is run, there are a number of inputs to the build script, all passed in the form of [environment variables][env].

In addition to environment variables, the build script's current directory is the source directory of the build script's package.

[env]: environment-variables.html

## Outputs of the Build Script

All the lines printed to stdout by a build script are written to a file like `target/debug/build/<pkg>/output` (the precise location may depend on your configuration). Any line that starts with `cargo:` is interpreted directly by Cargo. This line must be of the form `cargo:key=value`, like the examples below:

```notrust
# specially recognized by Cargo
cargo:rustc-link-lib=static=foo
cargo:rustc-link-search=native=/path/to/foo
cargo:rustc-cfg=foo
cargo:rustc-env=FOO=bar
# arbitrary user-defined metadata
cargo:root=/path/to/foo
cargo:libdir=/path/to/foo/lib
cargo:include=/path/to/foo/include
```

On the other hand, lines printed to stderr are written to a file like `target/debug/build/<pkg>/stderr` but are not interpreted by cargo.

There are a few special keys that Cargo recognizes, some affecting how the crate is built:

* `rustc-link-lib=[KIND=]NAME` indicates that the specified value is a library name and should be passed to the compiler as a `-l` flag. The optional `KIND` can be one of `static`, `dylib` (the default), or `framework`; see `rustc --help` for more details.
* `rustc-link-search=[KIND=]PATH` indicates the specified value is a library search path and should be passed to the compiler as a `-L` flag.
  The optional `KIND` can be one of `dependency`, `crate`, `native`, `framework`, or `all` (the default); see `rustc --help` for more details.
* `rustc-flags=FLAGS` is a set of flags passed to the compiler; only `-l` and `-L` flags are supported.
* `rustc-cfg=FEATURE` indicates that the specified feature will be passed as a `--cfg` flag to the compiler. This is often useful for performing compile-time detection of various features.
* `rustc-env=VAR=VALUE` indicates that the specified environment variable will be added to the environment which the compiler is run within. The value can then be retrieved by the `env!` macro in the compiled crate. This is useful for embedding additional metadata in the crate's code, such as the hash of Git HEAD or the unique identifier of a continuous integration server.
* `rerun-if-changed=PATH` is a path to a file or directory which indicates that the build script should be re-run if it changes (detected by a more-recent last-modified timestamp on the file). Normally build scripts are re-run if any file inside the crate root changes, but this can be used to scope changes to just a small set of files. (If this path points to a directory, the entire directory will not be traversed for changes; only changes to the timestamp of the directory itself, which corresponds to some types of changes within the directory depending on platform, will trigger a rebuild. To request a re-run on any changes within an entire directory, print a line for the directory and another line for everything inside it, recursively.) Note that if the build script itself (or one of its dependencies) changes, then it's rebuilt and rerun unconditionally, so `cargo:rerun-if-changed=build.rs` is almost always redundant (unless you want to ignore changes in all other files except for `build.rs`).
* `rerun-if-env-changed=VAR` is the name of an environment variable which indicates that the build script should be rerun if that variable's value changes. This basically behaves the same as `rerun-if-changed` except that it works with environment variables instead. Note that the environment variables here are intended for global environment variables like `CC` and such; it's not necessary to use this for env vars like `TARGET` that Cargo sets. Also note that if `rerun-if-env-changed` is printed out then Cargo will *only* rerun the build script if those environment variables change or if files printed out by `rerun-if-changed` change.

* `warning=MESSAGE` is a message that will be printed to the main console after a build script has finished running. Warnings are only shown for path dependencies (that is, those you're working on locally), so, for example, warnings printed out in crates.io crates are not emitted by default.

Any other element is user-defined metadata that will be passed to dependents. More information about this can be found in the [`links`][links] section.

[links]: #the-links-manifest-key

## Build Dependencies

Build scripts are also allowed to have dependencies on other Cargo-based crates. Dependencies are declared through the `build-dependencies` section of the manifest.

```toml
[build-dependencies]
foo = { git = "https://github.com/your-packages/foo" }
```

The build script **does not** have access to the dependencies listed in the `dependencies` or `dev-dependencies` section (they're not built yet!).
Build dependencies will likewise not be available to the package itself unless they are explicitly listed under `dependencies` as well.

## The `links` Manifest Key

In addition to the manifest key `build`, Cargo also supports a `links` manifest key to declare the name of a native library that is being linked to:

```toml
[package]
# ...
links = "foo"
build = "build.rs"
```

This manifest states that the package links to the `libfoo` native library, and it also has a build script for locating and/or building the library. Cargo requires that a `build` command is specified if a `links` entry is also specified.

The purpose of this manifest key is to give Cargo an understanding of the set of native dependencies that a package has, as well as providing a principled system of passing metadata between package build scripts.

Primarily, Cargo requires that there is at most one package per `links` value. In other words, it's forbidden to have two packages link to the same native library. Note, however, that there are [conventions in place][star-sys] to alleviate this.

[star-sys]: #-sys-packages

As mentioned above in the output format, each build script can generate an arbitrary set of metadata in the form of key-value pairs. This metadata is passed to the build scripts of **dependent** packages. For example, if `libbar` depends on `libfoo` and `libfoo` generates `key=value` as part of its metadata, then the build script of `libbar` will have the environment variable `DEP_FOO_KEY` set to `value`.

Note that metadata is only passed to immediate dependents, not transitive dependents. The motivation for this metadata passing is outlined in the linking to system libraries case study below.

## Overriding Build Scripts

If a manifest contains a `links` key, then Cargo supports overriding the build script specified with a custom library. The purpose of this functionality is to prevent running the build script in question altogether and instead supply the metadata ahead of time.

To override a build script, place the following configuration in any acceptable Cargo [configuration location](config.html).

```toml
[target.x86_64-unknown-linux-gnu.foo]
rustc-link-search = ["/path/to/foo"]
rustc-link-lib = ["foo"]
root = "/path/to/foo"
key = "value"
```

This section states that for the target `x86_64-unknown-linux-gnu` the library named `foo` has the metadata specified. This metadata is the same as the metadata generated as if the build script had run, providing a number of key/value pairs where the `rustc-flags`, `rustc-link-search`, and `rustc-link-lib` keys are slightly special.

With this configuration, if a package declares that it links to `foo` then the build script will **not** be compiled or run, and the metadata specified will instead be used.

# Case study: Code generation

Some Cargo packages need to have code generated just before they are compiled for various reasons. Here we'll walk through a simple example which generates a library function as part of the build script.

First, let's take a look at the directory structure of this package:

```notrust
.
├── Cargo.toml
├── build.rs
└── src
    └── main.rs

1 directory, 3 files
```

Here we can see that we have a `build.rs` build script and our binary in `main.rs`.
Next, let's take a look at the manifest:

```toml
# Cargo.toml

[package]
name = "hello-from-generated-code"
version = "0.1.0"
authors = ["you@example.com"]
build = "build.rs"
```

Here we can see we've got a build script specified which we'll use to generate some code. Let's see what's inside the build script:

```rust,no_run
// build.rs

use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;

fn main() {
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("hello.rs");
    let mut f = File::create(&dest_path).unwrap();

    f.write_all(b"
        pub fn message() -> &'static str {
            \"Hello, World!\"
        }
    ").unwrap();
}
```

There are a couple of points of note here:

* The script uses the `OUT_DIR` environment variable to discover where the output files should be located. It can use the process's current working directory to find where the input files should be located, but in this case we don't have any input files.
* This script is relatively simple as it just writes out a small generated file. One could imagine that other, more fanciful operations could take place, such as generating a Rust module from a C header file or another language definition.

Next, let's peek at the program itself:

```rust,ignore
// src/main.rs

include!(concat!(env!("OUT_DIR"), "/hello.rs"));

fn main() {
    println!("{}", message());
}
```

This is where the real magic happens. The program is using the rustc-defined `include!` macro in combination with the `concat!` and `env!` macros to include the generated file (`hello.rs`) into the crate's compilation.

Using the structure shown here, crates can include any number of generated files from the build script itself.

# Case study: Building some native code

Sometimes it's necessary to build some native C or C++ code as part of a package. This is another excellent use case for leveraging the build script to build a native library before the Rust crate itself. As an example, we'll create a Rust library which calls into C to print "Hello, World!".

Like above, let's first take a look at the project layout:

```notrust
.
├── Cargo.toml
├── build.rs
└── src
    ├── hello.c
    └── main.rs

1 directory, 4 files
```

Pretty similar to before! Next, the manifest:

```toml
# Cargo.toml

[package]
name = "hello-world-from-c"
version = "0.1.0"
authors = ["you@example.com"]
build = "build.rs"
```

For now we're not going to use any build dependencies, so let's take a look at the build script:

```rust,no_run
// build.rs

use std::process::Command;
use std::env;
use std::path::Path;

fn main() {
    let out_dir = env::var("OUT_DIR").unwrap();

    // note that there are a number of downsides to this approach, the comments
    // below detail how to improve the portability of these commands.
    Command::new("gcc").args(&["src/hello.c", "-c", "-fPIC", "-o"])
                       .arg(&format!("{}/hello.o", out_dir))
                       .status().unwrap();
    Command::new("ar").args(&["crus", "libhello.a", "hello.o"])
                      .current_dir(&Path::new(&out_dir))
                      .status().unwrap();

    println!("cargo:rustc-link-search=native={}", out_dir);
    println!("cargo:rustc-link-lib=static=hello");
}
```

This build script starts out by compiling our C file into an object file (by invoking `gcc`) and then converting this object file into a static library (by invoking `ar`).
The final step is to tell Cargo that our output is in `out_dir` and that the compiler should link the crate to `libhello.a` statically via the `-l static=hello` flag.

Note that there are a number of drawbacks to this hardcoded approach:

* The `gcc` command itself is not portable across platforms. For example it's unlikely that Windows platforms have `gcc`, and not even all Unix platforms may have `gcc`. The `ar` command is also in a similar situation.
* These commands do not take cross-compilation into account. If we're cross compiling for a platform such as Android it's unlikely that `gcc` will produce an ARM executable.

Not to fear, though, this is where a `build-dependencies` entry would help! The Cargo ecosystem has a number of packages to make this sort of task much easier, portable, and standardized. For example, the build script could be written as:

```rust,ignore
// build.rs

// Bring in a dependency on an externally maintained `gcc` package which manages
// invoking the C compiler.
extern crate gcc;

fn main() {
    gcc::compile_library("libhello.a", &["src/hello.c"]);
}
```

Add a build time dependency on the `gcc` crate with the following addition to your `Cargo.toml`:

```toml
[build-dependencies]
gcc = "0.3"
```

The [`gcc` crate](https://crates.io/crates/gcc) abstracts a range of build script requirements for C code:

* It invokes the appropriate compiler (MSVC for Windows, `gcc` for MinGW, `cc` for Unix platforms, etc.).
* It takes the `TARGET` variable into account by passing appropriate flags to the compiler being used.
* Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all handled automatically.
* The stdout output and `OUT_DIR` locations are also handled by the `gcc` library.

Here we can start to see some of the major benefits of farming as much functionality as possible out to common build dependencies rather than duplicating logic across all build scripts!

Back to the case study though, let's take a quick look at the contents of the `src` directory:

```c
// src/hello.c

#include <stdio.h>

void hello() {
    printf("Hello, World!\n");
}
```

```rust,ignore
// src/main.rs

// Note the lack of the `#[link]` attribute. We're delegating the responsibility
// of selecting what to link to over to the build script rather than hardcoding
// it in the source file.
extern { fn hello(); }

fn main() {
    unsafe { hello(); }
}
```

And there we go! This completes our example of building some C code from a Cargo package using the build script itself. It also shows why using a build dependency can be crucial in many situations, and can even be much more concise!

We've also seen a brief example of how a build script can use a crate as a dependency purely for the build process and not for the crate itself at runtime.

# Case study: Linking to system libraries

The final case study here will be investigating how a Cargo library links to a system library and how the build script is leveraged to support this use case.

Quite frequently a Rust crate wants to link to a native library provided by the system, either to bind its functionality or just to use it as part of an implementation detail. Performing this in a platform-agnostic fashion is quite a nuanced problem, and the purpose of a build script is again to farm out as much of this work as possible to make this as easy as possible for consumers.
As an example to follow, let's take a look at one of [Cargo's own dependencies][git2-rs], [libgit2][libgit2]. The C library has a number of constraints:

[git2-rs]: https://github.com/alexcrichton/git2-rs/tree/master/libgit2-sys
[libgit2]: https://github.com/libgit2/libgit2

* It has an optional dependency on OpenSSL on Unix to implement the https transport.
* It has an optional dependency on libssh2 on all platforms to implement the ssh transport.
* It is often not installed on all systems by default.
* It can be built from source using `cmake`.

To visualize what's going on here, let's take a look at the manifest for the relevant Cargo package that links to the native C library.

```toml
[package]
name = "libgit2-sys"
version = "0.1.0"
authors = ["..."]
links = "git2"
build = "build.rs"

[dependencies]
libssh2-sys = { git = "https://github.com/alexcrichton/ssh2-rs" }

[target.'cfg(unix)'.dependencies]
openssl-sys = { git = "https://github.com/alexcrichton/openssl-sys" }

# ...
```

As the above manifest shows, we've got a `build` script specified, but it's worth noting that this example has a `links` entry which indicates that the crate (`libgit2-sys`) links to the `git2` native library.

Here we also see that we chose to have the Rust crate have an unconditional dependency on `libssh2` via the `libssh2-sys` crate, as well as a platform-specific dependency on `openssl-sys` for \*nix (other variants elided for now). It may seem a little counterintuitive to express *C dependencies* in the *Cargo manifest*, but this is actually using one of Cargo's conventions in this space.

## `*-sys` Packages

To alleviate linking to system libraries, Cargo has a *convention* of package naming and functionality. Any package named `foo-sys` will provide two major pieces of functionality:

* The library crate will link to the native library `libfoo`. This will often probe the current system for `libfoo` before resorting to building from source.
* The library crate will provide **declarations** for functions in `libfoo`, but it does **not** provide bindings or higher-level abstractions.

The set of `*-sys` packages provides a common set of dependencies for linking to native libraries. There are a number of benefits to having this convention of native-library-related packages:

* Common dependencies on `foo-sys` alleviate the above rule about one package per value of `links`.
* A common dependency allows centralizing logic on discovering `libfoo` itself (or building it from source).
* These dependencies are easily overridable.

## Building libgit2

Now that we've got libgit2's dependencies sorted out, we need to actually write the build script. We're not going to look at specific snippets of code here and instead only take a look at the high-level details of the build script of `libgit2-sys`. This is not to recommend that all packages follow this strategy; it is just outlining one specific strategy.

The first thing the build script should do is query whether libgit2 is already installed on the host system. To do this we'll leverage the preexisting tool `pkg-config` (when it's available). We'll also use a `build-dependencies` section to factor out all the `pkg-config`-related code (or rather, someone's already done that!).
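As a rough sketch of that first step, a build script using the externally maintained `pkg-config` crate might look something like the following. This is an assumption-laden illustration, not the actual `libgit2-sys` build script, which is considerably more involved:

```rust
// build.rs (illustrative sketch only)

// The `pkg-config` crate, declared under [build-dependencies], knows how to
// query the host system for installed native libraries.
extern crate pkg_config;

fn main() {
    // If the system already provides libgit2, pkg-config emits the appropriate
    // `cargo:rustc-link-lib`/`cargo:rustc-link-search` directives for us and
    // there is nothing left to do.
    if pkg_config::find_library("libgit2").is_ok() {
        return;
    }

    // Otherwise, fall back to building the bundled copy from source; this is
    // where the cmake steps described next would go.
}
```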
If `pkg-config` fails to find libgit2, or if `pkg-config` just isn't installed, the next step is to build libgit2 from bundled source code (distributed as part of `libgit2-sys` itself). There are a few nuances when doing so that we need to take into account, however:

* The build system of libgit2, `cmake`, needs to be able to find libgit2's optional dependency of libssh2. We're sure we've already built it (it's a Cargo dependency); we just need to communicate this information. To do this we leverage the metadata format to communicate information between build scripts. In this example the libssh2 package printed out `cargo:root=...` to tell us where libssh2 is installed, and we can then pass this along to cmake with the `CMAKE_PREFIX_PATH` environment variable.

* We'll need to handle some `CFLAGS` values when compiling C code (and tell `cmake` about this). Some flags we may want to pass are `-m64` for 64-bit code, `-m32` for 32-bit code, or `-fPIC` for 64-bit code as well.

* Finally, we'll invoke `cmake` to place all output into the directory specified by the `OUT_DIR` environment variable, and then we'll print the necessary metadata to instruct rustc how to link to libgit2.

Most of the functionality of this build script is easily refactorable into common dependencies, so our build script isn't quite as intimidating as these descriptions make it sound! In reality it's expected that build scripts are quite succinct, farming out logic such as the above to build dependencies.
diff --git a/collector/compile-benchmarks/cargo/src/doc/config.md b/collector/compile-benchmarks/cargo/src/doc/config.md
new file mode 100644
index 000000000..46fe401b4
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/config.md
@@ -0,0 +1,138 @@
% Configuration

This document will explain how Cargo's configuration system works, as well as the available configuration keys. For configuration of a project through its manifest, see the [manifest format](manifest.html).

# Hierarchical structure

Cargo allows local configuration for a particular project as well as global configuration, like git. Cargo extends this to a hierarchical strategy. If, for example, Cargo were invoked in `/projects/foo/bar/baz`, then the following configuration files would be probed for and unified in this order:

* `/projects/foo/bar/baz/.cargo/config`
* `/projects/foo/bar/.cargo/config`
* `/projects/foo/.cargo/config`
* `/projects/.cargo/config`
* `/.cargo/config`
* `$HOME/.cargo/config`

With this structure, you can specify configuration per-project, and even possibly check it into version control. You can also specify personal defaults with a configuration file in your home directory.

# Configuration format

All configuration is currently in the [TOML format][toml] (like the manifest), with simple key-value pairs inside of sections (tables) which all get merged together.

[toml]: https://github.com/toml-lang/toml

# Configuration keys

All of the following keys are optional, and their defaults are listed as their value unless otherwise noted.

Key values that specify a tool may be given as an absolute path, a relative path, or as a pathless tool name. Absolute paths and pathless tool names are used as given. Relative paths are resolved relative to the parent directory of the `.cargo` directory of the config file that the value resides within.

```toml
# An array of paths to local repositories which are to be used as overrides for
# dependencies. For more information, see the Specifying Dependencies guide.
paths = ["/path/to/override"]

[cargo-new]
# This is your name/email to place in the `authors` section of a new Cargo.toml
# that is generated. If not present, then `git` will be probed, and if that is
# not present then `$USER` and `$EMAIL` will be used.
name = "..."
email = "..."

# By default `cargo new` will initialize a new Git repository. This key can be
# set to `hg` to create a Mercurial repository, or `none` to disable this
# behavior.
vcs = "none"

# For the following sections, $triple refers to any valid target triple, not the
# literal string "$triple", and it will apply whenever that target triple is
# being compiled to. 'cfg(...)' refers to the Rust-like `#[cfg]` syntax for
# conditional compilation.
[target.$triple]
# This is the linker which is passed to rustc (via `-C linker=`) when compiling
# for `$triple`. By default this flag is not passed to the compiler.
linker = ".."
# Same but for the library archiver which is passed to rustc via `-C ar=`.
ar = ".."
# If a runner is provided, compiled targets for `$triple` will be executed
# by invoking the specified runner executable with the actual target as its
# first argument. This applies to `cargo run`, `cargo test` and `cargo bench`
# commands. By default compiled targets are executed directly.
runner = ".."
# Custom flags to pass to all compiler invocations that target $triple.
# This value overrides build.rustflags when both are present.
rustflags = ["..", ".."]

[target.'cfg(...)']
# Similar to the $triple configuration, but using the `cfg` syntax.
# If several `cfg` and $triple targets are candidates, then the rustflags
# are concatenated. The `cfg` syntax only applies to rustflags, and not to
# linker.
rustflags = ["..", ".."]

# Configuration keys related to the registry
[registry]
index = "..."   # URL of the registry index (defaults to the central repository)
token = "..."   # Access token (found on the central repo's website)

[http]
proxy = "host:port" # HTTP proxy to use for HTTP requests (defaults to none)
                    # in libcurl format, e.g. "socks5h://host:port"
timeout = 60000     # Timeout for each HTTP request, in milliseconds
cainfo = "cert.pem" # Path to Certificate Authority (CA) bundle (optional)
check-revoke = true # Indicates whether SSL certs are checked for revocation

[build]
jobs = 1                  # number of parallel jobs, defaults to # of CPUs
rustc = "rustc"           # the rust compiler tool
rustdoc = "rustdoc"       # the doc generator tool
target = "triple"         # build for the target triple
target-dir = "target"     # path of where to place all generated artifacts
rustflags = ["..", ".."]  # custom flags to pass to all compiler invocations

[term]
verbose = false # whether cargo provides verbose output
color = 'auto'  # whether cargo colorizes output

# Network configuration
[net]
retry = 2 # number of times a network call will automatically be retried

# Alias cargo commands. The first 3 aliases are built in. If your
# command requires grouped whitespace, use the list format.
[alias]
b = "build"
t = "test"
r = "run"
rr = "run --release"
space_example = ["run", "--release", "--", "\"command list\""]
```

# Environment variables

Cargo can also be configured through environment variables in addition to the TOML syntax above. For each configuration key above of the form `foo.bar`, the environment variable `CARGO_FOO_BAR` can also be used to define the value.
For example, the `build.jobs` key can also be defined by `CARGO_BUILD_JOBS`.

Environment variables will take precedence over TOML configuration, and currently only integer, boolean, and string keys can be defined via environment variables.

In addition to the system above, Cargo recognizes a few other specific [environment variables][env].

[env]: environment-variables.html
diff --git a/collector/compile-benchmarks/cargo/src/doc/crates-io.md b/collector/compile-benchmarks/cargo/src/doc/crates-io.md
new file mode 100644
index 000000000..384151b74
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/crates-io.md
@@ -0,0 +1,222 @@
% Publishing on crates.io

Once you've got a library that you'd like to share with the world, it's time to publish it on [crates.io]! Publishing a crate is when a specific version is uploaded to be hosted on [crates.io].

Take care when publishing a crate, because a publish is **permanent**. The version can never be overwritten, and the code cannot be deleted. There is no limit to the number of versions which can be published, however.

# Before your first publish

First things first, you'll need an account on [crates.io] to acquire an API token. To do so, [visit the home page][crates.io] and log in via a GitHub account (required for now). After this, visit your [Account Settings](https://crates.io/me) page and run the `cargo login` command specified.

```notrust
$ cargo login abcdefghijklmnopqrstuvwxyz012345
```

This command will inform Cargo of your API token and store it locally in your `~/.cargo/credentials` (previously it was `~/.cargo/config`). Note that this token is a **secret** and should not be shared with anyone else. If it leaks for any reason, you should regenerate it immediately.

# Before publishing a new crate

Keep in mind that crate names on [crates.io] are allocated on a first-come, first-served basis. Once a crate name is taken, it cannot be used for another crate.

## Packaging a crate

The next step is to package up your crate into a format that can be uploaded to [crates.io]. For this we'll use the `cargo package` subcommand. This will take our entire crate and package it all up into a `*.crate` file in the `target/package` directory.

```notrust
$ cargo package
```

As an added bonus, the `*.crate` will be verified independently of the current source tree. After the `*.crate` is created, it's unpacked into `target/package` and then built from scratch to ensure that all necessary files are there for the build to succeed. This behavior can be disabled with the `--no-verify` flag.

Now's a good time to take a look at the `*.crate` file to make sure you didn't accidentally package up that 2GB video asset, or large data files used for code generation, integration tests, or benchmarking. There is currently a 10MB upload size limit on `*.crate` files. So, if the `tests` and `benches` directories and their dependencies add up to only a couple of megabytes, you can keep them in your package; otherwise, it's better to exclude them.

Cargo will automatically ignore files ignored by your version control system when packaging, but if you want to specify an extra set of files to ignore you can use the `exclude` key in the manifest:

```toml
[package]
# ...
exclude = [
    "public/assets/*",
    "videos/*",
]
```

The syntax of each element in this array is what [rust-lang/glob](https://github.com/rust-lang/glob) accepts.
If you'd rather roll with a whitelist instead of a blacklist, Cargo also supports an `include` key, which, if set, overrides the `exclude` key:

```toml
[package]
# ...
include = [
    "**/*.rs",
    "Cargo.toml",
]
```

## Uploading the crate

Now that we've got a `*.crate` file ready to go, it can be uploaded to [crates.io] with the `cargo publish` command. And that's it: you've now published your first crate!

```notrust
$ cargo publish
```

If you'd like to skip the `cargo package` step, the `cargo publish` subcommand will automatically package up the local crate if a copy isn't found already.

Be sure to check out the [metadata you can specify](manifest.html#package-metadata) to ensure your crate can be discovered more easily!

# Publishing a new version of an existing crate

In order to release a new version, change the `version` value specified in your `Cargo.toml` manifest. Keep in mind [the semver rules](manifest.html#the-version-field). Then optionally run `cargo package` if you want to inspect the `*.crate` file for the new version before publishing, and run `cargo publish` to upload the new version.

# Managing a crates.io-based crate

Management of crates is primarily done through the command line `cargo` tool rather than the [crates.io] web interface. For this, there are a few subcommands to manage a crate.

## `cargo yank`

Occasions may arise where you publish a version of a crate that actually ends up being broken for one reason or another (syntax error, forgot to include a file, etc.). For situations such as this, Cargo supports a “yank” of a version of a crate.

```notrust
$ cargo yank --vers 1.0.1
$ cargo yank --vers 1.0.1 --undo
```

A yank **does not** delete any code. This feature is not intended for deleting accidentally uploaded secrets, for example. If that happens, you must reset those secrets immediately.

The semantics of a yanked version are that no new dependencies can be created against that version, but all existing dependencies continue to work. One of the major goals of [crates.io] is to act as a permanent archive of crates that does not change over time, and allowing deletion of a version would go against this goal. Essentially a yank means that all projects with a `Cargo.lock` will not break, while any future `Cargo.lock` files generated will not list the yanked version.

## `cargo owner`

A crate is often developed by more than one person, or the primary maintainer may change over time! The owner of a crate is the only person allowed to publish new versions of the crate, but an owner may designate additional owners.

```notrust
$ cargo owner --add my-buddy
$ cargo owner --remove my-buddy
$ cargo owner --add github:rust-lang:owners
$ cargo owner --remove github:rust-lang:owners
```

The owner IDs given to these commands must be GitHub user names or GitHub teams.

If a user name is given to `--add`, that user becomes a “named” owner, with full rights to the crate. In addition to being able to publish or yank versions of the crate, they have the ability to add or remove owners, *including* the owner that made *them* an owner. Needless to say, you shouldn't make people you don't fully trust into a named owner. In order to become a named owner, a user must have logged into [crates.io] previously.

If a team name is given to `--add`, that team becomes a “team” owner, with restricted rights to the crate.
While they have permission to publish or yank +versions of the crate, they *do not* have the ability to add or remove owners. +In addition to being more convenient for managing groups of owners, teams are +just a bit more secure against owners becoming malicious. + +The syntax for teams is currently `github:org:team` (see examples above). +In order to add a team as an owner one must be a member of that team. No +such restriction applies to removing a team as an owner. + +## GitHub permissions + +Team membership is not something GitHub provides simple public access to, and it +is likely for you to encounter the following message when working with them: + +> It looks like you don’t have permission to query a necessary property from +GitHub to complete this request. You may need to re-authenticate on [crates.io] +to grant permission to read GitHub org memberships. Just go to +https://crates.io/login + +This is basically a catch-all for “you tried to query a team, and one of the +five levels of membership access control denied this”. That is not an +exaggeration. GitHub’s support for team access control is Enterprise Grade. + +The most likely cause of this is simply that you last logged in before this +feature was added. We originally requested *no* permissions from GitHub when +authenticating users, because we didn’t actually ever use the user’s token for +anything other than logging them in. However to query team membership on your +behalf, we now require +[the `read:org` scope](https://developer.github.com/v3/oauth/#scopes). + +You are free to deny us this scope, and everything that worked before teams +were introduced will keep working. However you will never be able to add a team +as an owner, or publish a crate as a team owner. If you ever attempt to do this, +you will get the error above. You may also see this error if you ever try to +publish a crate that you don’t own at all, but otherwise happens to have a team. + +If you ever change your mind, or just aren’t sure if [crates.io] has sufficient +permission, you can always go to https://crates.io/login, which will prompt you +for permission if [crates.io] doesn’t have all the scopes it would like to. + +An additional barrier to querying GitHub is that the organization may be +actively denying third party access. To check this, you can go to: + + https://github.com/organizations/:org/settings/oauth_application_policy + +where `:org` is the name of the organization (e.g. rust-lang). You may see +something like: + +![Organization Access Control](images/org-level-acl.png) + +Where you may choose to explicitly remove [crates.io] from your organization’s +blacklist, or simply press the “Remove Restrictions” button to allow all third +party applications to access this data. + +Alternatively, when [crates.io] requested the `read:org` scope, you could have +explicitly whitelisted [crates.io] querying the org in question by pressing +the “Grant Access” button next to its name: + +![Authentication Access Control](images/auth-level-acl.png) + +[crates.io]: https://crates.io/ diff --git a/collector/compile-benchmarks/cargo/src/doc/environment-variables.md b/collector/compile-benchmarks/cargo/src/doc/environment-variables.md new file mode 100644 index 000000000..6fe532d61 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/environment-variables.md @@ -0,0 +1,131 @@ +% Environment Variables + +Cargo sets and reads a number of environment variables which your code can detect +or override. 
Here is a list of the variables Cargo sets, organized by when it interacts with them:

# Environment variables Cargo reads

You can override these environment variables to change Cargo's behavior on your system:

* `CARGO_HOME` - Cargo maintains a local cache of the registry index and of git checkouts of crates. By default these are stored under `$HOME/.cargo`, but this variable overrides the location of this directory. Once a crate is cached it is not removed by the clean command.
* `CARGO_TARGET_DIR` - Location of where to place all generated artifacts, relative to the current working directory.
* `RUSTC` - Instead of running `rustc`, Cargo will execute this specified compiler.
* `RUSTC_WRAPPER` - Instead of simply running `rustc`, Cargo will execute this specified wrapper, passing as its command-line arguments the rustc invocation, with the first argument being rustc.
* `RUSTDOC` - Instead of running `rustdoc`, Cargo will execute this specified `rustdoc` instance.
* `RUSTDOCFLAGS` - A space-separated list of custom flags to pass to all `rustdoc` invocations that Cargo performs. In contrast with `cargo rustdoc`, this is useful for passing a flag to *all* `rustdoc` instances.
* `RUSTFLAGS` - A space-separated list of custom flags to pass to all compiler invocations that Cargo performs. In contrast with `cargo rustc`, this is useful for passing a flag to *all* compiler instances.

Note that Cargo will also read environment variables for `.cargo/config` configuration values, as described in [that documentation][config-env].

[config-env]: config.html#environment-variables

# Environment variables Cargo sets for crates

Cargo exposes these environment variables to your crate when it is compiled. Note that this applies to test binaries as well. To get the value of any of these variables in a Rust program, do this:

```
let version = env!("CARGO_PKG_VERSION");
```

`version` will now contain the value of `CARGO_PKG_VERSION`.

* `CARGO` - Path to the `cargo` binary performing the build.
* `CARGO_MANIFEST_DIR` - The directory containing the manifest of your package.
* `CARGO_PKG_VERSION` - The full version of your package.
* `CARGO_PKG_VERSION_MAJOR` - The major version of your package.
* `CARGO_PKG_VERSION_MINOR` - The minor version of your package.
* `CARGO_PKG_VERSION_PATCH` - The patch version of your package.
* `CARGO_PKG_VERSION_PRE` - The pre-release version of your package.
* `CARGO_PKG_AUTHORS` - Colon-separated list of authors from the manifest of your package.
* `CARGO_PKG_NAME` - The name of your package.
* `CARGO_PKG_DESCRIPTION` - The description of your package.
* `CARGO_PKG_HOMEPAGE` - The home page of your package.
* `OUT_DIR` - If the package has a build script, this is set to the folder where the build script should place its output. See below for more information.

# Environment variables Cargo sets for build scripts

Cargo sets several environment variables when build scripts are run. Because these variables are not yet set when the build script is compiled, the above example using `env!` won't work; instead you'll need to retrieve the values when the build script is run:

```
use std::env;
let out_dir = env::var("OUT_DIR").unwrap();
```

`out_dir` will now contain the value of `OUT_DIR`.

* `CARGO_MANIFEST_DIR` - The directory containing the manifest for the package being built (the package containing the build script).
  Also note that this is the value of the current working directory of the build script when it starts.
* `CARGO_MANIFEST_LINKS` - the manifest `links` value.
* `CARGO_FEATURE_<name>` - For each activated feature of the package being built, this environment variable will be present, where `<name>` is the name of the feature uppercased and having `-` translated to `_`.
* `CARGO_CFG_<cfg>` - For each [configuration option][configuration] of the package being built, this environment variable will contain the value of the configuration, where `<cfg>` is the name of the configuration uppercased and having `-` translated to `_`. Boolean configurations are present if they are set, and not present otherwise. Configurations with multiple values are joined to a single variable with the values delimited by `,`.
* `OUT_DIR` - the folder in which all output should be placed. This folder is inside the build directory for the package being built, and it is unique for the package in question.
* `TARGET` - the target triple that is being compiled for. Native code should be compiled for this triple. Some more information about target triples can be found in [clang's own documentation][clang].
* `HOST` - the host triple of the rust compiler.
* `NUM_JOBS` - the amount of parallelism specified at the top level. This can be useful to pass a `-j` parameter to a system like `make`. Note that care should be taken when interpreting this environment variable. For historical reasons this is still provided, but recent versions of Cargo, for example, do not need to run `make -j` as it will happen automatically. Cargo implements its own [jobserver] and will allow build scripts to inherit this information, so programs compatible with GNU make jobservers will already have appropriately configured parallelism.
* `OPT_LEVEL`, `DEBUG` - values of the corresponding variables for the profile currently being built.
* `PROFILE` - `release` for release builds, `debug` for other builds.
* `DEP_<name>_<key>` - For more information about this set of environment variables, see the build script documentation about [`links`][links].
* `RUSTC`, `RUSTDOC` - the compiler and documentation generator that Cargo has resolved to use, passed to the build script so it might use them as well.

[links]: build-script.html#the-links-manifest-key
[profile]: manifest.html#the-profile-sections
[configuration]: https://doc.rust-lang.org/reference/attributes.html#conditional-compilation
[clang]: http://clang.llvm.org/docs/CrossCompilation.html#target-triple
[jobserver]: http://make.mad-scientist.net/papers/jobserver-implementation/

# Environment variables Cargo sets for 3rd party subcommands

Cargo exposes this environment variable to 3rd party subcommands (i.e. programs named `cargo-foobar` placed in `$PATH`):

* `CARGO` - Path to the `cargo` binary performing the build.
diff --git a/collector/compile-benchmarks/cargo/src/doc/external-tools.md b/collector/compile-benchmarks/cargo/src/doc/external-tools.md
new file mode 100644
index 000000000..8afb762fe
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/external-tools.md
@@ -0,0 +1,103 @@
% External tools

One of the goals of Cargo is simple integration with third-party tools, like IDEs and other build systems.
To make integration easier, Cargo has several facilities:

* a `cargo metadata` command, which outputs project structure and dependencies information in JSON,

* a `--message-format` flag, which outputs information about a particular build, and

* support for custom subcommands.


# Information about project structure

You can use the `cargo metadata` command to get information about project structure and dependencies. The output of the command looks like this:

```text
{
  // Integer version number of the format.
  "version": integer,

  // List of packages for this workspace, including dependencies.
  "packages": [
    {
      // Opaque package identifier.
      "id": PackageId,

      "name": string,

      "version": string,

      "source": SourceId,

      // A list of declared dependencies, see `resolve` field for actual dependencies.
      "dependencies": [ Dependency ],

      "targets": [ Target ],

      // Path to Cargo.toml
      "manifest_path": string,
    }
  ],

  "workspace_members": [ PackageId ],

  // Dependencies graph.
  "resolve": {
    "nodes": [
      {
        "id": PackageId,
        "dependencies": [ PackageId ]
      }
    ]
  }
}
```

The format is stable and versioned. When calling `cargo metadata`, you should pass the `--format-version` flag explicitly to avoid forward-incompatibility hazards.

If you are using Rust, there is the [cargo_metadata] crate.

[cargo_metadata]: https://crates.io/crates/cargo_metadata


# Information about build

When passing `--message-format=json`, Cargo will output the following information during the build:

* compiler errors and warnings,

* produced artifacts,

* results of the build scripts (for example, native dependencies).

The output goes to stdout, one JSON object per line. The `reason` field distinguishes different kinds of messages.

Information about dependencies in the Makefile-compatible format is stored in the `.d` files alongside the artifacts.


# Custom subcommands

Cargo is designed to be extensible with new subcommands without having to modify Cargo itself. This is achieved by translating a cargo invocation of the form cargo `(?<command>[^ ]+)` into an invocation of an external tool `cargo-${command}` that then needs to be present in one of the user's `$PATH` directories.

A custom subcommand may use the `CARGO` environment variable to call back to Cargo. Alternatively, it can link to the `cargo` crate as a library, but this approach has drawbacks:

* Cargo as a library is unstable: the API changes without deprecation,

* versions of the Cargo library and the Cargo binary may be different.
diff --git a/collector/compile-benchmarks/cargo/src/doc/faq.md b/collector/compile-benchmarks/cargo/src/doc/faq.md
new file mode 100644
index 000000000..cf92bbe2c
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/faq.md
@@ -0,0 +1,193 @@
% Frequently Asked Questions

# Is the plan to use GitHub as a package repository?

No. The plan for Cargo is to use [crates.io], like npm or Rubygems do with npmjs.org and rubygems.org.

We plan to support git repositories as a source of packages forever, because they can be used for early development and temporary patches, even when people use the registry as the primary source of packages.

# Why build crates.io rather than use GitHub as a registry?

We think that it's very important to support multiple ways to download packages, including downloading from GitHub and copying packages into your project itself.
+ +That said, we think that [crates.io] offers a number of important benefits, and +will likely become the primary way that people download packages in Cargo. + +For precedent, both Node.js’s [npm][1] and Ruby’s [bundler][2] support both a +central registry model as well as a Git-based model, and most packages +are downloaded through the registry in those ecosystems, with an +important minority of packages making use of git-based packages. + +[1]: https://www.npmjs.org +[2]: https://bundler.io + +Some of the advantages that make a central registry popular in other +languages include: + +* **Discoverability**. A central registry provides an easy place to look + for existing packages. Combined with tagging, this also makes it + possible for a registry to provide ecosystem-wide information, such as a + list of the most popular or most-depended-on packages. +* **Speed**. A central registry makes it possible to easily fetch just + the metadata for packages quickly and efficiently, and then to + efficiently download just the published package, and not other bloat + that happens to exist in the repository. This adds up to a significant + improvement in the speed of dependency resolution and fetching. As + dependency graphs scale up, downloading all of the git repositories bogs + down fast. Also remember that not everybody has a high-speed, + low-latency Internet connection. + +# Will Cargo work with C code (or other languages)? + +Yes! + +Cargo handles compiling Rust code, but we know that many Rust projects +link against C code. We also know that there are decades of tooling +built up around compiling languages other than Rust. + +Our solution: Cargo allows a package to [specify a script](build-script.html) +(written in Rust) to run before invoking `rustc`. Rust is leveraged to +implement platform-specific configuration and refactor out common build +functionality among packages. + +# Can Cargo be used inside of `make` (or `ninja`, or ...) + +Indeed. While we intend Cargo to be useful as a standalone way to +compile Rust projects at the top-level, we know that some people will +want to invoke Cargo from other build tools. + +We have designed Cargo to work well in those contexts, paying attention +to things like error codes and machine-readable output modes. We still +have some work to do on those fronts, but using Cargo in the context of +conventional scripts is something we designed for from the beginning and +will continue to prioritize. + +# Does Cargo handle multi-platform projects or cross-compilation? + +Rust itself provides facilities for configuring sections of code based +on the platform. Cargo also supports [platform-specific +dependencies][target-deps], and we plan to support more per-platform +configuration in `Cargo.toml` in the future. + +[target-deps]: manifest.html#the-dependencies-section + +In the longer-term, we’re looking at ways to conveniently cross-compile +projects using Cargo. + +# Does Cargo support environments, like `production` or `test`? + +We support environments through the use of [profiles][profile] to support: + +[profile]: manifest.html#the-profile-sections + +* environment-specific flags (like `-g --opt-level=0` for development + and `--opt-level=3` for production). +* environment-specific dependencies (like `hamcrest` for test assertions). +* environment-specific `#[cfg]` +* a `cargo test` command + +# Does Cargo work on Windows? + +Yes! + +All commits to Cargo are required to pass the local test suite on Windows. 
If, however, you find a Windows issue, we consider it a bug, so [please file an issue][3].

[3]: https://github.com/rust-lang/cargo/issues

# Why do binaries have `Cargo.lock` in version control, but not libraries?

The purpose of a `Cargo.lock` is to describe the state of the world at the time of a successful build. It is then used to provide deterministic builds across whatever machine is building the project by ensuring that the exact same dependencies are being compiled.

This property is most desirable for applications and projects which are at the very end of the dependency chain (binaries). As a result, it is recommended that all binaries check in their `Cargo.lock`.

For libraries the situation is somewhat different. A library is not only used by the library developers, but also any downstream consumers of the library. Users dependent on the library will not inspect the library's `Cargo.lock` (even if it exists). This is precisely because a library should **not** be deterministically recompiled for all users of the library.

If a library ends up being used transitively by several dependencies, it's likely that just a single copy of the library is desired (based on semver compatibility). If all libraries were to check in their `Cargo.lock`, then multiple copies of the library would be used, and perhaps even a version conflict would arise.

In other words, libraries specify semver requirements for their dependencies but cannot see the full picture. Only end products like binaries have a full picture to decide what versions of dependencies should be used.

# Can libraries use `*` as a version for their dependencies?

**As of January 22nd, 2016, [crates.io] rejects all packages (not just libraries) with wildcard dependency constraints.**

While libraries _can_, strictly speaking, they should not. A version requirement of `*` says “This will work with every version ever,” which is never going to be true. Libraries should always specify the range that they do work with, even if it's something as general as “every 1.x.y version.”

# Why `Cargo.toml`?

As one of the most frequent interactions with Cargo, the question of why the configuration file is named `Cargo.toml` arises from time to time. The leading capital-`C` was chosen to ensure that the manifest was grouped with other similar configuration files in directory listings. Sorting files often puts capital letters before lowercase letters, ensuring files like `Makefile` and `Cargo.toml` are placed together. The trailing `.toml` was chosen to emphasize the fact that the file is in the [TOML configuration format](https://github.com/toml-lang/toml).

Cargo does not allow other names such as `cargo.toml` or `Cargofile`, to emphasize the ease with which a Cargo repository can be identified. An option of many possible names has historically led to confusion where one case was handled but others were accidentally forgotten.

[crates.io]: https://crates.io/

# How can Cargo work offline?

Cargo is often used in situations with limited or no network access, such as on airplanes, in CI environments, or embedded in large production deployments. Users are often surprised when Cargo attempts to fetch resources from the network, and hence the request for Cargo to work offline comes up frequently.

Cargo, at its heart, will not attempt to access the network unless told to do so.
That is, if no crates come from crates.io, a git repository, or some other network location, Cargo will never attempt to make a network connection. As a result, if Cargo attempts to touch the network, then it's because it needs to fetch a required resource.

Cargo is also quite aggressive about caching information to minimize the amount of network activity. It guarantees, for example, that if `cargo build` (or an equivalent) is run to completion, then the next `cargo build` will not touch the network so long as `Cargo.toml` has not been modified in the meantime. This avoidance of the network boils down to a `Cargo.lock` existing and a populated cache of the crates reflected in the lock file. If either of these components is missing, then they're required for the build to succeed and must be fetched remotely.

As of Rust 1.11.0 Cargo understands a new flag, `--frozen`, which is an assertion that it shouldn't touch the network. When passed, Cargo will immediately return an error if it would otherwise attempt a network request. The error should include contextual information about why the network request is being made in the first place, to help with debugging. Note that this flag *does not change the behavior of Cargo*; it simply asserts that Cargo shouldn't touch the network, as a previous command has been run to ensure that network activity shouldn't be necessary.

For more information about vendoring, see documentation on [source replacement][replace].

[replace]: source-replacement.html
diff --git a/collector/compile-benchmarks/cargo/src/doc/favicon.ico b/collector/compile-benchmarks/cargo/src/doc/favicon.ico
new file mode 100644
index 000000000..a91ad692c
Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/favicon.ico differ
diff --git a/collector/compile-benchmarks/cargo/src/doc/footer.html b/collector/compile-benchmarks/cargo/src/doc/footer.html
new file mode 100644
index 000000000..c2eff8f40
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/footer.html
@@ -0,0 +1,11 @@

+ + + diff --git a/collector/compile-benchmarks/cargo/src/doc/guide.md b/collector/compile-benchmarks/cargo/src/doc/guide.md new file mode 100644 index 000000000..e63be4dd5 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/guide.md @@ -0,0 +1,446 @@ +% Cargo Guide + +Welcome to the Cargo guide. This guide will give you all that you need to know +about how to use Cargo to develop Rust projects. + +# Why Cargo exists + +Cargo is a tool that allows Rust projects to declare their various +dependencies and ensure that you’ll always get a repeatable build. + +To accomplish this goal, Cargo does four things: + +* Introduces two metadata files with various bits of project information. +* Fetches and builds your project’s dependencies. +* Invokes `rustc` or another build tool with the correct parameters to build + your project. +* Introduces conventions to make working with Rust projects easier. + +# Creating a new project + +To start a new project with Cargo, use `cargo new`: + +```shell +$ cargo new hello_world --bin +``` + +We’re passing `--bin` because we’re making a binary program: if we +were making a library, we’d leave it off. This also initializes a new `git` +repository by default. If you don't want it to do that, pass `--vcs none`. + +Let’s check out what Cargo has generated for us: + +```shell +$ cd hello_world +$ tree . +. +├── Cargo.toml +└── src + └── main.rs + +1 directory, 2 files +``` + +If we had just used `cargo new hello_world` without the `--bin` flag, then +we would have a `lib.rs` instead of a `main.rs`. For now, however, this is all +we need to get started. First, let’s check out `Cargo.toml`: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] +``` + +This is called a **manifest**, and it contains all of the metadata that Cargo +needs to compile your project. + +Here’s what’s in `src/main.rs`: + +```rust +fn main() { + println!("Hello, world!"); +} +``` + +Cargo generated a “hello world” for us. Let’s compile it: + +```shell +$ cargo build + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +And then run it: + +```shell +$ ./target/debug/hello_world +Hello, world! +``` + +We can also use `cargo run` to compile and then run it, all in one step (You +won't see the `Compiling` line if you have not made any changes since you last +compiled): + +```shell +$ cargo run + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) + Running `target/debug/hello_world` +Hello, world! +``` + +You’ll now notice a new file, `Cargo.lock`. It contains information about our +dependencies. Since we don’t have any yet, it’s not very interesting. + +Once you’re ready for release, you can use `cargo build --release` to compile +your files with optimizations turned on: + +```shell +$ cargo build --release + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +`cargo build --release` puts the resulting binary in `target/release` instead of +`target/debug`. + +Compiling in debug mode is the default for development-- compilation time is +shorter since the compiler doesn't do optimizations, but the code will run +slower. Release mode takes longer to compile, but the code will run faster. + +# Working on an existing Cargo project + +If you download an existing project that uses Cargo, it’s really easy +to get going. + +First, get the project from somewhere. 
In this example, we’ll use `rand` +cloned from its repository on GitHub: + +```shell +$ git clone https://github.com/rust-lang-nursery/rand.git +$ cd rand +``` + +To build, use `cargo build`: + +```shell +$ cargo build + Compiling rand v0.1.0 (file:///path/to/project/rand) +``` + +This will fetch all of the dependencies and then build them, along with the +project. + +# Adding dependencies from crates.io + +[crates.io] is the Rust community's central package registry that serves as a +location to discover and download packages. `cargo` is configured to use it by +default to find requested packages. + +To depend on a library hosted on [crates.io], add it to your `Cargo.toml`. + +[crates.io]: https://crates.io/ + +## Adding a dependency + +If your `Cargo.toml` doesn't already have a `[dependencies]` section, add that, +then list the crate name and version that you would like to use. This example +adds a dependency of the `time` crate: + +```toml +[dependencies] +time = "0.1.12" +``` + +The version string is a [semver] version requirement. The [specifying +dependencies](specifying-dependencies.html) docs have more information about +the options you have here. + +[semver]: https://github.com/steveklabnik/semver#requirements + +If we also wanted to add a dependency on the `regex` crate, we would not need +to add `[dependencies]` for each crate listed. Here's what your whole +`Cargo.toml` file would look like with dependencies on the `time` and `regex` +crates: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] + +[dependencies] +time = "0.1.12" +regex = "0.1.41" +``` + +Re-run `cargo build`, and Cargo will fetch the new dependencies and all of +their dependencies, compile them all, and update the `Cargo.lock`: + +```shell +$ cargo build + Updating registry `https://github.com/rust-lang/crates.io-index` + Downloading memchr v0.1.5 + Downloading libc v0.1.10 + Downloading regex-syntax v0.2.1 + Downloading memchr v0.1.5 + Downloading aho-corasick v0.3.0 + Downloading regex v0.1.41 + Compiling memchr v0.1.5 + Compiling libc v0.1.10 + Compiling regex-syntax v0.2.1 + Compiling memchr v0.1.5 + Compiling aho-corasick v0.3.0 + Compiling regex v0.1.41 + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +Our `Cargo.lock` contains the exact information about which revision of all of +these dependencies we used. + +Now, if `regex` gets updated, we will still build with the same revision until +we choose to `cargo update`. + +You can now use the `regex` library using `extern crate` in `main.rs`. + +```rust +extern crate regex; + +use regex::Regex; + +fn main() { + let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap(); + println!("Did our date match? {}", re.is_match("2014-01-01")); +} +``` + +Running it will show: + +```shell +$ cargo run + Running `target/hello_world` +Did our date match? true +``` +# Project layout + +Cargo uses conventions for file placement to make it easy to dive into a new +Cargo project: + +```shell +. +├── Cargo.lock +├── Cargo.toml +├── benches +│   └── large-input.rs +├── examples +│   └── simple.rs +├── src +│   ├── bin +│   │   └── another_executable.rs +│   ├── lib.rs +│   └── main.rs +└── tests + └── some-integration-tests.rs +``` + +* `Cargo.toml` and `Cargo.lock` are stored in the root of your project (*package + root*). +* Source code goes in the `src` directory. +* The default library file is `src/lib.rs`. +* The default executable file is `src/main.rs`. 
+* Other executables can be placed in `src/bin/*.rs`. +* Integration tests go in the `tests` directory (unit tests go in each file + they're testing). +* Examples go in the `examples` directory. +* Benchmarks go in the `benches` directory. + +These are explained in more detail in the [manifest +description](manifest.html#the-project-layout). + +# Cargo.toml vs Cargo.lock + +`Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk +about them, here’s a summary: + +* `Cargo.toml` is about describing your dependencies in a broad sense, and is + written by you. +* `Cargo.lock` contains exact information about your dependencies. It is + maintained by Cargo and should not be manually edited. + +If you’re building a library that other projects will depend on, put +`Cargo.lock` in your `.gitignore`. If you’re building an executable like a +command-line tool or an application, check `Cargo.lock` into `git`. If you're +curious about why that is, see ["Why do binaries have `Cargo.lock` in version +control, but not libraries?" in the +FAQ](faq.html#why-do-binaries-have-cargolock-in-version-control-but-not-libraries). + +Let’s dig in a little bit more. + +`Cargo.toml` is a **manifest** file in which we can specify a bunch of +different metadata about our project. For example, we can say that we depend +on another project: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] + +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand.git" } +``` + +This project has a single dependency, on the `rand` library. We’ve stated in +this case that we’re relying on a particular Git repository that lives on +GitHub. Since we haven’t specified any other information, Cargo assumes that +we intend to use the latest commit on the `master` branch to build our project. + +Sound good? Well, there’s one problem: If you build this project today, and +then you send a copy to me, and I build this project tomorrow, something bad +could happen. There could be more commits to `rand` in the meantime, and my +build would include new commits while yours would not. Therefore, we would +get different builds. This would be bad because we want reproducible builds. + +We could fix this problem by putting a `rev` line in our `Cargo.toml`: + +```toml +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand.git", rev = "9f35b8e" } +``` + +Now our builds will be the same. But there’s a big drawback: now we have to +manually think about SHA-1s every time we want to update our library. This is +both tedious and error prone. + +Enter the `Cargo.lock`. Because of its existence, we don’t need to manually +keep track of the exact revisions: Cargo will do it for us. When we have a +manifest like this: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] + +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand.git" } +``` + +Cargo will take the latest commit and write that information out into our +`Cargo.lock` when we build for the first time. 
That file will look like this:
+
+```toml
+[[package]]
+name = "hello_world"
+version = "0.1.0"
+dependencies = [
+ "rand 0.1.0 (git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9)",
+]
+
+[[package]]
+name = "rand"
+version = "0.1.0"
+source = "git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9"
+```
+
+You can see that there’s a lot more information here, including the exact
+revision we used to build. Now when you give your project to someone else,
+they’ll use the exact same SHA, even though we didn’t specify it in our
+`Cargo.toml`.
+
+When we’re ready to opt in to a new version of the library, Cargo can
+re-calculate the dependencies and update things for us:
+
+```shell
+$ cargo update           # updates all dependencies
+$ cargo update -p rand   # updates just “rand”
+```
+
+This will write out a new `Cargo.lock` with the new version information. Note
+that the argument to `cargo update` is actually a
+[Package ID Specification](pkgid-spec.html) and `rand` is just a short
+specification.
+
+# Tests
+
+Cargo can run your tests with the `cargo test` command. Cargo looks for tests
+to run in two places: in each of your `src` files and any tests in `tests/`.
+Tests in your `src` files should be unit tests, and tests in `tests/` should be
+integration-style tests. As such, you’ll need to import your crates into
+the files in `tests`.
+
+Here's an example of running `cargo test` in our project, which currently has
+no tests:
+
+```shell
+$ cargo test
+   Compiling rand v0.1.0 (https://github.com/rust-lang-nursery/rand.git#9f35b8e)
+   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
+     Running target/test/hello_world-9c2b65bbb79eabce
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+```
+
+If our project had tests, we would see more output with the correct number of
+tests.
+
+You can also run a specific test by passing a filter:
+
+```shell
+$ cargo test foo
+```
+
+This will run any test with `foo` in its name.
+
+`cargo test` runs additional checks as well. For example, it will compile any
+examples you’ve included and will also test the examples in your
+documentation. Please see the [testing guide][testing] in the Rust
+documentation for more details.
+
+[testing]: https://doc.rust-lang.org/book/testing.html
+
+## Travis CI
+
+To test your project on Travis CI, here is a sample `.travis.yml` file:
+
+```yaml
+language: rust
+rust:
+  - stable
+  - beta
+  - nightly
+matrix:
+  allow_failures:
+    - rust: nightly
+```
+
+This will test all three release channels, but any breakage in nightly
+will not fail your overall build. Please see the [Travis CI Rust
+documentation](https://docs.travis-ci.com/user/languages/rust/) for more
+information.
+
+
+## Build cache
+
+Cargo shares build artifacts among all the packages of a single workspace.
+Today, Cargo does not share build results across different workspaces, but
+a similar result can be achieved by using a third-party tool, [sccache].
+
+To set up `sccache`, install it with `cargo install sccache` and set the
+`RUSTC_WRAPPER` environment variable to `sccache` before invoking Cargo.
+If you use bash, it makes sense to add `export RUSTC_WRAPPER=sccache` to
+`.bashrc`. Refer to the sccache documentation for more details.
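+
+For example, a minimal one-time setup in a bash session, assuming `~/.cargo/bin`
+is already on your `PATH`, might look like this:
+
+```shell
+$ cargo install sccache        # build and install the sccache binary
+$ export RUSTC_WRAPPER=sccache # tell Cargo to wrap rustc invocations
+$ cargo build                  # compiler invocations now go through sccache
+```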
+ +[sccache]: https://github.com/mozilla/sccache + + +# Further reading + +Now that you have an overview of how to use cargo and have created your first +crate, you may be interested in: + +* [Publishing your crate on crates.io](crates-io.html) +* [Reading about all the possible ways of specifying dependencies](specifying-dependencies.html) +* [Learning more details about what you can specify in your `Cargo.toml` manifest](manifest.html) + +Even more topics are available in the Docs menu at the top! diff --git a/collector/compile-benchmarks/cargo/src/doc/header.html b/collector/compile-benchmarks/cargo/src/doc/header.html new file mode 100644 index 000000000..614d1ba47 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/header.html @@ -0,0 +1,52 @@ + + + + + + +
diff --git a/collector/compile-benchmarks/cargo/src/doc/html-headers.html b/collector/compile-benchmarks/cargo/src/doc/html-headers.html new file mode 100644 index 000000000..80ad896f3 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/html-headers.html @@ -0,0 +1,2 @@ + + diff --git a/collector/compile-benchmarks/cargo/src/doc/images/Cargo-Logo-Small.png b/collector/compile-benchmarks/cargo/src/doc/images/Cargo-Logo-Small.png new file mode 100644 index 000000000..e3a99208c Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/images/Cargo-Logo-Small.png differ diff --git a/collector/compile-benchmarks/cargo/src/doc/images/auth-level-acl.png b/collector/compile-benchmarks/cargo/src/doc/images/auth-level-acl.png new file mode 100644 index 000000000..e7bc25180 Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/images/auth-level-acl.png differ diff --git a/collector/compile-benchmarks/cargo/src/doc/images/circle-with-i.png b/collector/compile-benchmarks/cargo/src/doc/images/circle-with-i.png new file mode 100644 index 000000000..ef105af86 Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/images/circle-with-i.png differ diff --git a/collector/compile-benchmarks/cargo/src/doc/images/forkme.png b/collector/compile-benchmarks/cargo/src/doc/images/forkme.png new file mode 100644 index 000000000..29379046c Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/images/forkme.png differ diff --git a/collector/compile-benchmarks/cargo/src/doc/images/noise.png b/collector/compile-benchmarks/cargo/src/doc/images/noise.png new file mode 100644 index 000000000..6b748b5db Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/images/noise.png differ diff --git a/collector/compile-benchmarks/cargo/src/doc/images/org-level-acl.png b/collector/compile-benchmarks/cargo/src/doc/images/org-level-acl.png new file mode 100644 index 000000000..ed5aa882a Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/images/org-level-acl.png differ diff --git a/collector/compile-benchmarks/cargo/src/doc/images/search.png b/collector/compile-benchmarks/cargo/src/doc/images/search.png new file mode 100644 index 000000000..6cb18672a Binary files /dev/null and b/collector/compile-benchmarks/cargo/src/doc/images/search.png differ diff --git a/collector/compile-benchmarks/cargo/src/doc/index.md b/collector/compile-benchmarks/cargo/src/doc/index.md new file mode 100644 index 000000000..c85a6e33b --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/index.md @@ -0,0 +1,111 @@ +% Cargo, Rust’s Package Manager + +# Installing + +### Install Stable Rust and Cargo + +The easiest way to get Cargo is to get the current stable release of [Rust] by +using the `rustup` script: + +```shell +$ curl -sSf https://static.rust-lang.org/rustup.sh | sh +``` + +After this, you can use the `rustup` command to also install `beta` or `nightly` +channels for Rust and Cargo. + +### Install Nightly Cargo + +To install just Cargo, the current recommended installation method is through +the official nightly builds. Note that Cargo will also require that [Rust] is +already installed on the system. 
+ +| Platform | 64-bit | 32-bit | +|------------------|-------------------|-------------------| +| Linux binaries | [tar.gz][linux64] | [tar.gz][linux32] | +| MacOS binaries | [tar.gz][mac64] | [tar.gz][mac32] | +| Windows binaries | [tar.gz][win64] | [tar.gz][win32] | + +### Build and Install Cargo from Source + +Alternatively, you can [build Cargo from source][compiling-from-source]. + +[rust]: https://www.rust-lang.org/ +[linux64]: https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-unknown-linux-gnu.tar.gz +[linux32]: https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-unknown-linux-gnu.tar.gz +[mac64]: https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-apple-darwin.tar.gz +[mac32]: https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-apple-darwin.tar.gz +[win64]: https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-pc-windows-gnu.tar.gz +[win32]: https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-pc-windows-gnu.tar.gz +[compiling-from-source]: https://github.com/rust-lang/cargo#compiling-from-source + +# Let’s get started + +To start a new project with Cargo, use `cargo new`: + +```shell +$ cargo new hello_world --bin +``` + +We’re passing `--bin` because we’re making a binary program: if we +were making a library, we’d leave it off. + +Let’s check out what Cargo has generated for us: + +```shell +$ cd hello_world +$ tree . +. +├── Cargo.toml +└── src + └── main.rs + +1 directory, 2 files +``` + +This is all we need to get started. First, let’s check out `Cargo.toml`: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] +``` + +This is called a **manifest**, and it contains all of the metadata that Cargo +needs to compile your project. + +Here’s what’s in `src/main.rs`: + +``` +fn main() { + println!("Hello, world!"); +} +``` + +Cargo generated a “hello world” for us. Let’s compile it: + +```shell +$ cargo build + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +And then run it: + +```shell +$ ./target/debug/hello_world +Hello, world! +``` + +We can also use `cargo run` to compile and then run it, all in one step: + +```shell +$ cargo run + Fresh hello_world v0.1.0 (file:///path/to/project/hello_world) + Running `target/hello_world` +Hello, world! +``` + +# Going further + +For more details on using Cargo, check out the [Cargo Guide](guide.html) diff --git a/collector/compile-benchmarks/cargo/src/doc/javascripts/all.js b/collector/compile-benchmarks/cargo/src/doc/javascripts/all.js new file mode 100644 index 000000000..3fb0eef24 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/javascripts/all.js @@ -0,0 +1,40 @@ +//= require_tree . 
+ +Prism.languages.toml = { + // https://github.com/LeaVerou/prism/issues/307 + 'comment': [{ + pattern: /(^[^"]*?("[^"]*?"[^"]*?)*?[^"\\]*?)(\/\*[\w\W]*?\*\/|(^|[^:])#.*?(\r?\n|$))/g, + lookbehind: true + }], + 'string': /("|')(\\?.)*?\1/g, + 'number': /\d+/, + 'boolean': /true|false/, + 'toml-section': /\[.*\]/, + 'toml-key': /[\w-]+/ +}; + +$(function() { + var pres = document.querySelectorAll('pre.rust'); + for (var i = 0; i < pres.length; i++) { + pres[i].className += ' language-rust'; + } + + // Toggles docs menu + $('button.dropdown, a.dropdown').click(function(el, e) { + $(this).toggleClass('active').siblings('ul').toggleClass('open'); + + return false; + }); + + // A click in the page anywhere but in the menu will turn the menu off + $(document).on('click', function(e) { + // Checks to make sure the click did not come from inside dropdown menu + // if it doesn't we close the menu + // else, we do nothing and just follow the link + if (!$(e.target).closest('ul.dropdown').length) { + var toggles = $('button.dropdown.active, a.dropdown.active'); + toggles.toggleClass('active').siblings('ul').toggleClass('open'); + + } + }); +}); diff --git a/collector/compile-benchmarks/cargo/src/doc/javascripts/prism.js b/collector/compile-benchmarks/cargo/src/doc/javascripts/prism.js new file mode 100644 index 000000000..13c240783 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/javascripts/prism.js @@ -0,0 +1,6 @@ +/* http://prismjs.com/download.html?themes=prism&languages=markup+css+clike+javascript */ +self="undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{};var Prism=function(){var e=/\blang(?:uage)?-(?!\*)(\w+)\b/i,t=self.Prism={util:{encode:function(e){return e instanceof n?new n(e.type,t.util.encode(e.content),e.alias):"Array"===t.util.type(e)?e.map(t.util.encode):e.replace(/&/g,"&").replace(/e.length)break e;if(!(d instanceof a)){c.lastIndex=0;var m=c.exec(d);if(m){u&&(f=m[1].length);var y=m.index-1+f,m=m[0].slice(f),v=m.length,k=y+v,b=d.slice(0,y+1),w=d.slice(k+1),N=[p,1];b&&N.push(b);var O=new a(l,g?t.tokenize(m,g):m,h);N.push(O),w&&N.push(w),Array.prototype.splice.apply(r,N)}}}}}return r},hooks:{all:{},add:function(e,n){var a=t.hooks.all;a[e]=a[e]||[],a[e].push(n)},run:function(e,n){var a=t.hooks.all[e];if(a&&a.length)for(var r,i=0;r=a[i++];)r(n)}}},n=t.Token=function(e,t,n){this.type=e,this.content=t,this.alias=n};if(n.stringify=function(e,a,r){if("string"==typeof e)return e;if("[object Array]"==Object.prototype.toString.call(e))return e.map(function(t){return n.stringify(t,a,e)}).join("");var i={type:e.type,content:n.stringify(e.content,a,r),tag:"span",classes:["token",e.type],attributes:{},language:a,parent:r};if("comment"==i.type&&(i.attributes.spellcheck="true"),e.alias){var l="Array"===t.util.type(e.alias)?e.alias:[e.alias];Array.prototype.push.apply(i.classes,l)}t.hooks.run("wrap",i);var o="";for(var s in i.attributes)o+=s+'="'+(i.attributes[s]||"")+'"';return"<"+i.tag+' class="'+i.classes.join(" ")+'" '+o+">"+i.content+""},!self.document)return self.addEventListener?(self.addEventListener("message",function(e){var n=JSON.parse(e.data),a=n.language,r=n.code;self.postMessage(JSON.stringify(t.util.encode(t.tokenize(r,t.languages[a])))),self.close()},!1),self.Prism):self.Prism;var a=document.getElementsByTagName("script");return 
a=a[a.length-1],a&&(t.filename=a.src,document.addEventListener&&!a.hasAttribute("data-manual")&&document.addEventListener("DOMContentLoaded",t.highlightAll)),self.Prism}();"undefined"!=typeof module&&module.exports&&(module.exports=Prism);; +Prism.languages.markup={comment://g,prolog:/<\?.+?\?>/,doctype://,cdata://i,tag:{pattern:/<\/?[\w:-]+\s*(?:\s+[\w:-]+(?:=(?:("|')(\\?[\w\W])*?\1|[^\s'">=]+))?\s*)*\/?>/gi,inside:{tag:{pattern:/^<\/?[\w:-]+/i,inside:{punctuation:/^<\/?/,namespace:/^[\w-]+?:/}},"attr-value":{pattern:/=(?:('|")[\w\W]*?(\1)|[^\s>]+)/gi,inside:{punctuation:/=|>|"/g}},punctuation:/\/?>/g,"attr-name":{pattern:/[\w:-]+/g,inside:{namespace:/^[\w-]+?:/}}}},entity:/\&#?[\da-z]{1,8};/gi},Prism.hooks.add("wrap",function(t){"entity"===t.type&&(t.attributes.title=t.content.replace(/&/,"&"))});; +Prism.languages.css={comment:/\/\*[\w\W]*?\*\//g,atrule:{pattern:/@[\w-]+?.*?(;|(?=\s*{))/gi,inside:{punctuation:/[;:]/g}},url:/url\((["']?).*?\1\)/gi,selector:/[^\{\}\s][^\{\};]*(?=\s*\{)/g,property:/(\b|\B)[\w-]+(?=\s*:)/gi,string:/("|')(\\?.)*?\1/g,important:/\B!important\b/gi,punctuation:/[\{\};:]/g,"function":/[-a-z0-9]+(?=\()/gi},Prism.languages.markup&&Prism.languages.insertBefore("markup","tag",{style:{pattern:/[\w\W]*?<\/style>/gi,inside:{tag:{pattern:/|<\/style>/gi,inside:Prism.languages.markup.tag.inside},rest:Prism.languages.css}}});; +Prism.languages.clike={comment:[{pattern:/(^|[^\\])\/\*[\w\W]*?\*\//g,lookbehind:!0},{pattern:/(^|[^\\:])\/\/.*?(\r?\n|$)/g,lookbehind:!0}],string:/("|')(\\?.)*?\1/g,"class-name":{pattern:/((?:(?:class|interface|extends|implements|trait|instanceof|new)\s+)|(?:catch\s+\())[a-z0-9_\.\\]+/gi,lookbehind:!0,inside:{punctuation:/(\.|\\)/}},keyword:/\b(if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/g,"boolean":/\b(true|false)\b/g,"function":{pattern:/[a-z0-9_]+\(/gi,inside:{punctuation:/\(/}},number:/\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?)\b/g,operator:/[-+]{1,2}|!|<=?|>=?|={1,3}|&{1,2}|\|?\||\?|\*|\/|\~|\^|\%/g,ignore:/&(lt|gt|amp);/gi,punctuation:/[{}[\];(),.:]/g};; +Prism.languages.javascript=Prism.languages.extend("clike",{keyword:/\b(break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|false|finally|for|function|get|if|implements|import|in|instanceof|interface|let|new|null|package|private|protected|public|return|set|static|super|switch|this|throw|true|try|typeof|var|void|while|with|yield)\b/g,number:/\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?|NaN|-?Infinity)\b/g}),Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:/(^|[^/])\/(?!\/)(\[.+?]|\\.|[^/\r\n])+\/[gim]{0,3}(?=\s*($|[\r\n,.;})]))/g,lookbehind:!0}}),Prism.languages.markup&&Prism.languages.insertBefore("markup","tag",{script:{pattern:/[\w\W]*?<\/script>/gi,inside:{tag:{pattern:/|<\/script>/gi,inside:Prism.languages.markup.tag.inside},rest:Prism.languages.javascript}}});; diff --git a/collector/compile-benchmarks/cargo/src/doc/manifest.md b/collector/compile-benchmarks/cargo/src/doc/manifest.md new file mode 100644 index 000000000..6dbed298c --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/manifest.md @@ -0,0 +1,769 @@ +% The Manifest Format + +# The `[package]` section + +The first section in a `Cargo.toml` is `[package]`. + +```toml +[package] +name = "hello_world" # the name of the package +version = "0.1.0" # the current version, obeying semver +authors = ["you@example.com"] +``` + +All three of these fields are mandatory. 
+
+## The `version` field
+
+Cargo bakes in the concept of [Semantic
+Versioning](http://semver.org/), so make sure you follow some basic rules:
+
+* Before you reach 1.0.0, anything goes, but if you make breaking changes,
+  increment the minor version. In Rust, breaking changes include adding fields to
+  structs or variants to enums.
+* After 1.0.0, only make breaking changes when you increment the major version.
+  Don’t break the build.
+* After 1.0.0, don’t add any new public API (no new `pub` anything) in tiny
+  versions. Always increment the minor version if you add any new `pub` structs,
+  traits, fields, types, functions, methods, or anything else.
+* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0.
+
+## The `build` field (optional)
+
+This field specifies a file in the project root which is a [build script][1] for
+building native code. More information can be found in the build script
+[guide][1].
+
+[1]: build-script.html
+
+```toml
+[package]
+# ...
+build = "build.rs"
+```
+
+## The `documentation` field (optional)
+
+This field specifies a URL to a website hosting the crate's documentation.
+If no URL is specified in the manifest file, [crates.io][cratesio] will
+automatically link your crate to the corresponding [docs.rs][docsrs] page.
+
+Documentation links from specific hosts are blacklisted. Hosts are added
+to the blacklist if they are known to not be hosting documentation and are
+possibly of malicious intent, e.g. ad tracking networks. URLs from the
+following hosts are blacklisted:
+
+* rust-ci.org
+
+Documentation URLs from blacklisted hosts will not appear on crates.io, and
+may be replaced by docs.rs links.
+
+[docsrs]: https://docs.rs/
+[cratesio]: https://crates.io/
+
+## The `exclude` and `include` fields (optional)
+
+You can explicitly specify to Cargo that a set of [globs][globs] should be
+ignored or included for the purposes of packaging and rebuilding a package. The
+globs specified in the `exclude` field identify a set of files that are not
+included when a package is published as well as ignored for the purposes of
+detecting when to rebuild a package, and the globs in `include` specify files
+that are explicitly included.
+
+If a VCS is being used for a package, the `exclude` field will be seeded with
+the VCS’ ignore settings (`.gitignore` for git, for example).
+
+```toml
+[package]
+# ...
+exclude = ["build/**/*.o", "doc/**/*.html"]
+```
+
+```toml
+[package]
+# ...
+include = ["src/**/*", "Cargo.toml"]
+```
+
+The options are mutually exclusive: setting `include` will override an
+`exclude`. Note that `include` must be an exhaustive list of files, as otherwise
+necessary source files may not be included.
+
+[globs]: http://doc.rust-lang.org/glob/glob/struct.Pattern.html
+
+### Migrating to `gitignore`-like pattern matching
+
+The current interpretation of these configs is based on UNIX Globs, as
+implemented in the [`glob` crate](https://crates.io/crates/glob). We want
+Cargo's `include` and `exclude` configs to work as similarly to `gitignore` as
+possible. [The `gitignore` specification](https://git-scm.com/docs/gitignore) is
+also based on Globs, but has a bunch of additional features that enable easier
+pattern writing and more control. Therefore, we are migrating the interpretation
+for the rules of these configs to use the [`ignore`
+crate](https://crates.io/crates/ignore), treating each rule as a single
+line in a `gitignore` file.
See [the tracking
+issue](https://github.com/rust-lang/cargo/issues/4268) for more details on the
+migration.
+
+## The `publish` field (optional)
+
+The `publish` field can be used to prevent a package from being published to a
+package registry (like *crates.io*) by mistake.
+
+```toml
+[package]
+# ...
+publish = false
+```
+
+## The `workspace` field (optional)
+
+The `workspace` field can be used to configure the workspace that this package
+will be a member of. If not specified, this will be inferred to be the first
+`Cargo.toml` with `[workspace]` upwards in the filesystem.
+
+```toml
+[package]
+# ...
+workspace = "path/to/workspace/root"
+```
+
+For more information, see the documentation for the workspace table below.
+
+## Package metadata
+
+A number of optional metadata fields are also accepted under the
+`[package]` section:
+
+```toml
+[package]
+# ...
+
+# A short blurb about the package. This is not rendered in any format when
+# uploaded to crates.io (aka this is not markdown).
+description = "..."
+
+# These URLs point to more information about the package. These are
+# intended to be webviews of the relevant data, not necessarily compatible
+# with VCS tools and the like.
+documentation = "..."
+homepage = "..."
+repository = "..."
+
+# This points to a file under the package root (relative to this `Cargo.toml`).
+# The contents of this file are stored and indexed in the registry.
+readme = "..."
+
+# This is a list of up to five keywords that describe this crate. Keywords
+# are searchable on crates.io, and you may choose any words that would
+# help someone find this crate.
+keywords = ["...", "..."]
+
+# This is a list of up to five categories where this crate would fit.
+# Categories are a fixed list available at crates.io/category_slugs, and
+# they must match exactly.
+categories = ["...", "..."]
+
+# This is a string description of the license for this package. Currently
+# crates.io will validate the license provided against a whitelist of known
+# license identifiers from http://spdx.org/licenses/. Multiple licenses can be
+# separated with a `/`.
+license = "..."
+
+# If a project is using a nonstandard license, then this key may be specified in
+# lieu of the above key and must point to a file relative to this manifest
+# (similar to the readme key).
+license-file = "..."
+
+# Optional specification of badges to be displayed on crates.io.
+#
+# - The badges pertaining to build status that are currently available are
+#   Appveyor, CircleCI, GitLab, and TravisCI.
+# - Available badges pertaining to code test coverage are Codecov and
+#   Coveralls.
+# - There are also maintenance-related badges based on isitmaintained.com
+#   which state the issue resolution time, percent of open issues, and future
+#   maintenance intentions.
+#
+# If a `repository` key is required, this refers to a repository in
+# `user/repo` format.
+[badges]
+
+# Appveyor: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default), `bitbucket`, and
+# `gitlab`; `id` is optional; you can specify the appveyor project id if you
+# want to use that instead. `project_name` is optional; use when the repository
+# name differs from the appveyor project name.
+appveyor = { repository = "...", branch = "master", service = "github" }
+
+# Circle CI: `repository` is required. `branch` is optional; default is `master`
+circle-ci = { repository = "...", branch = "master" }
+
+# GitLab: `repository` is required.
`branch` is optional; default is `master`
+gitlab = { repository = "...", branch = "master" }
+
+# Travis CI: `repository` in format "<user>/<repo>" is required.
+# `branch` is optional; default is `master`
+travis-ci = { repository = "...", branch = "master" }
+
+# Codecov: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default), `bitbucket`, and
+# `gitlab`.
+codecov = { repository = "...", branch = "master", service = "github" }
+
+# Coveralls: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default) and `bitbucket`.
+coveralls = { repository = "...", branch = "master", service = "github" }
+
+# Is it maintained resolution time: `repository` is required.
+is-it-maintained-issue-resolution = { repository = "..." }
+
+# Is it maintained percentage of open issues: `repository` is required.
+is-it-maintained-open-issues = { repository = "..." }
+
+# Maintenance: `status` is required. Available options are `actively-developed`,
+# `passively-maintained`, `as-is`, `none`, `experimental`, `looking-for-maintainer`
+# and `deprecated`.
+maintenance = { status = "..." }
+```
+
+The [crates.io](https://crates.io) registry will render the description, display
+the license, link to the three URLs, and categorize by the keywords. These keys
+provide useful information to users of the registry and also influence the
+search ranking of a crate. Omitting all of this metadata in a published crate is
+highly discouraged.
+
+## The `metadata` table (optional)
+
+Cargo by default will warn about unused keys in `Cargo.toml` to assist in
+detecting typos and such. The `package.metadata` table, however, is completely
+ignored by Cargo and will not be warned about. This section can be used for
+tools which would like to store project configuration in `Cargo.toml`. For
+example:
+
+```toml
+[package]
+name = "..."
+# ...
+
+# Metadata used when generating an Android APK, for example.
+[package.metadata.android]
+package-name = "my-awesome-android-app"
+assets = "path/to/static"
+```
+
+# Dependency sections
+
+See the [specifying dependencies page](specifying-dependencies.html) for
+information on the `[dependencies]`, `[dev-dependencies]`,
+`[build-dependencies]`, and target-specific `[target.*.dependencies]` sections.
+
+# The `[profile.*]` sections
+
+Cargo supports custom configuration of how rustc is invoked through profiles at
+the top level. Any manifest may declare a profile, but only the top-level
+project’s profiles are actually read. All dependencies’ profiles will be
+overridden. This is done so the top-level project has control over how its
+dependencies are compiled.
+
+There are five currently supported profile names, all of which have the same
+configuration available to them. Listed below is the configuration available,
+along with the defaults for each profile.
+
+```toml
+# The development profile, used for `cargo build`.
+[profile.dev]
+opt-level = 0      # controls the `--opt-level` the compiler builds with.
+                   # 0-1 is good for debugging. 2 is well-optimized. Max is 3.
+debug = true       # include debug information (debug symbols). Equivalent to
+                   # `-C debuginfo=2` compiler flag.
+rpath = false      # controls whether compiler should set loader paths.
+                   # If true, passes `-C rpath` flag to the compiler.
+lto = false        # Link Time Optimization usually reduces size of binaries
+                   # and static libraries. Increases compilation time.
+                   # If true, passes `-C lto` flag to the compiler.
+debug-assertions = true # controls whether debug assertions are enabled + # (e.g. debug_assert!() and arithmetic overflow checks) +codegen-units = 1 # if > 1 enables parallel code generation which improves + # compile times, but prevents some optimizations. + # Passes `-C codegen-units`. Ignored when `lto = true`. +panic = 'unwind' # panic strategy (`-C panic=...`), can also be 'abort' + +# The release profile, used for `cargo build --release`. +[profile.release] +opt-level = 3 +debug = false +rpath = false +lto = false +debug-assertions = false +codegen-units = 1 +panic = 'unwind' + +# The testing profile, used for `cargo test`. +[profile.test] +opt-level = 0 +debug = 2 +rpath = false +lto = false +debug-assertions = true +codegen-units = 1 +panic = 'unwind' + +# The benchmarking profile, used for `cargo bench`. +[profile.bench] +opt-level = 3 +debug = false +rpath = false +lto = false +debug-assertions = false +codegen-units = 1 +panic = 'unwind' + +# The documentation profile, used for `cargo doc`. +[profile.doc] +opt-level = 0 +debug = 2 +rpath = false +lto = false +debug-assertions = true +codegen-units = 1 +panic = 'unwind' +``` + +# The `[features]` section + +Cargo supports features to allow expression of: + +* conditional compilation options (usable through `cfg` attributes); +* optional dependencies, which enhance a package, but are not required; and +* clusters of optional dependencies, such as `postgres`, that would include the + `postgres` package, the `postgres-macros` package, and possibly other packages + (such as development-time mocking libraries, debugging tools, etc.). + +A feature of a package is either an optional dependency, or a set of other +features. The format for specifying features is: + +```toml +[package] +name = "awesome" + +[features] +# The default set of optional packages. Most people will want to use these +# packages, but they are strictly optional. Note that `session` is not a package +# but rather another feature listed in this manifest. +default = ["jquery", "uglifier", "session"] + +# A feature with no dependencies is used mainly for conditional compilation, +# like `#[cfg(feature = "go-faster")]`. +go-faster = [] + +# The `secure-password` feature depends on the bcrypt package. This aliasing +# will allow people to talk about the feature in a higher-level way and allow +# this package to add more requirements to the feature in the future. +secure-password = ["bcrypt"] + +# Features can be used to reexport features of other packages. The `session` +# feature of package `awesome` will ensure that the `session` feature of the +# package `cookie` is also enabled. +session = ["cookie/session"] + +[dependencies] +# These packages are mandatory and form the core of this package’s distribution. +cookie = "1.2.0" +oauth = "1.1.0" +route-recognizer = "=2.1.0" + +# A list of all of the optional dependencies, some of which are included in the +# above `features`. They can be opted into by apps. 
+jquery = { version = "1.0.2", optional = true }
+uglifier = { version = "1.5.3", optional = true }
+bcrypt = { version = "*", optional = true }
+civet = { version = "*", optional = true }
+```
+
+To use the package `awesome`:
+
+```toml
+[dependencies.awesome]
+version = "1.3.5"
+default-features = false # do not include the default features, and optionally
+                         # cherry-pick individual features
+features = ["secure-password", "civet"]
+```
+
+## Rules
+
+The usage of features is subject to a few rules:
+
+* Feature names must not conflict with other package names in the manifest. This
+  is because they are opted into via `features = [...]`, which only has a single
+  namespace.
+* With the exception of the `default` feature, all features are opt-in. To opt
+  out of the default feature, use `default-features = false` and cherry-pick
+  individual features.
+* Feature groups are not allowed to cyclically depend on one another.
+* Dev-dependencies cannot be optional.
+* Feature groups can only reference optional dependencies.
+* When a feature is selected, Cargo will call `rustc` with `--cfg
+  feature="${feature_name}"`. If a feature group is included, it and all of its
+  individual features will be included. This can be tested in code via
+  `#[cfg(feature = "foo")]`.
+
+Note that it is explicitly allowed for features to not actually activate any
+optional dependencies. This allows packages to internally enable/disable
+features without requiring a new dependency.
+
+## Usage in end products
+
+One major use case for this feature is specifying optional features in
+end products. For example, the Servo project may want to include optional
+features that people can enable or disable when they build it.
+
+In that case, Servo will describe features in its `Cargo.toml` and they can be
+enabled using command-line flags:
+
+```
+$ cargo build --release --features "shumway pdf"
+```
+
+Default features could be excluded using `--no-default-features`.
+
+## Usage in packages
+
+In most cases, the concept of *optional dependency* in a library is best
+expressed as a separate package that the top-level application depends on.
+
+However, high-level packages, like Iron or Piston, may want the ability to
+curate a number of packages for easy installation. The current Cargo system
+allows them to curate a number of mandatory dependencies into a single package
+for easy installation.
+
+In some cases, packages may want to provide additional curation for optional
+dependencies:
+
+* grouping a number of low-level optional dependencies together into a single
+  high-level feature;
+* specifying packages that are recommended (or suggested) to be included by
+  users of the package; and
+* including a feature (like `secure-password` in the motivating example) that
+  will only work if an optional dependency is available, and would be difficult
+  to implement as a separate package (for example, it may be overly difficult to
+  design an IO package to be completely decoupled from OpenSSL, with opt-in via
+  the inclusion of a separate package).
+
+In almost all cases, it is an antipattern to use these features outside of
+high-level packages that are designed for curation. If a feature is optional, it
+can almost certainly be expressed as a separate package.
+
+# The `[workspace]` section
+
+Projects can define a workspace, which is a set of crates that will all share the
+same `Cargo.lock` and output directory.
The `[workspace]` table can be defined
+as:
+
+```toml
+[workspace]
+
+# Optional key, inferred from path dependencies if not present.
+# Additional non-path dependencies that should be included must be given here.
+# In particular, for a virtual manifest, all members have to be listed.
+members = ["path/to/member1", "path/to/member2", "path/to/member3/*"]
+
+# Optional key, empty if not present.
+exclude = ["path1", "path/to/dir2"]
+```
+
+Workspaces were added to Cargo as part of [RFC 1525] and have a number of
+properties:
+
+* A workspace can contain multiple crates where one of them is the *root crate*.
+* The *root crate*'s `Cargo.toml` contains the `[workspace]` table, but is not
+  required to have other configuration.
+* Whenever any crate in the workspace is compiled, output is placed in the
+  *workspace root*, i.e. next to the *root crate*'s `Cargo.toml`.
+* The lock file for all crates in the workspace resides in the *workspace root*.
+* The `[patch]` and `[replace]` sections in `Cargo.toml` are only recognized
+  in the *root crate*'s manifest, and ignored in member crates' manifests.
+
+[RFC 1525]: https://github.com/rust-lang/rfcs/blob/master/text/1525-cargo-workspace.md
+
+The *root crate* of a workspace, indicated by the presence of `[workspace]` in
+its manifest, is responsible for defining the entire workspace. All `path`
+dependencies residing in the workspace directory become members. You can add
+additional packages to the workspace by listing them in the `members` key. Note
+that explicitly listed members will also have their path dependencies included
+in the workspace. Sometimes a project may have a lot of workspace members,
+making it onerous to keep the list up to date; the `members` list can also use
+[globs][globs] to match multiple paths. Finally, the `exclude`
+key can be used to blacklist paths from being included in a workspace. This can
+be useful if some path dependencies aren't desired to be in the workspace at
+all.
+
+The `package.workspace` manifest key (described above) is used in member crates
+to point at a workspace's root crate. If this key is omitted then it is inferred
+to be the first crate whose manifest contains `[workspace]` upwards in the
+filesystem.
+
+A crate may either specify `package.workspace` or specify `[workspace]`. That
+is, a crate cannot both be a root crate in a workspace (contain `[workspace]`)
+and also be a member crate of another workspace (contain `package.workspace`).
+
+Most of the time workspaces will not need to be dealt with manually, as
+`cargo new` and `cargo init` handle workspace configuration automatically.
+
+## Virtual Manifest
+
+In workspace manifests, if the `package` table is present, the workspace root
+crate will be treated as a normal package, as well as a workspace. If the
+`package` table is not present in a workspace manifest, it is called a *virtual
+manifest*.
+
+When working with *virtual manifests*, package-related cargo commands, like
+`cargo build`, are not available. Most such commands, however, support the
+`--all` option, which executes the command for every non-virtual manifest in
+the workspace.
+
+# The project layout
+
+If your project is an executable, name the main source file `src/main.rs`. If it
+is a library, name the main source file `src/lib.rs`.
+
+Cargo will also treat any files located in `src/bin/*.rs` as executables.
If your
+executable consists of more than just one source file, you might also use a directory
+inside `src/bin` containing a `main.rs` file which will be treated as an executable
+with the name of the parent directory.
+Do note, however, that once you add a `[[bin]]` section ([see
+below](#configuring-a-target)), Cargo will no longer automatically build files
+located in `src/bin/*.rs`. Instead, you must create a `[[bin]]` section for
+each file you want to build.
+
+Your project can optionally contain folders named `examples`, `tests`, and
+`benches`, which Cargo will treat as containing examples,
+integration tests, and benchmarks, respectively. Analogous to `bin` targets, they
+may be composed of single files or directories with a `main.rs` file.
+
+```notrust
+▾ src/           # directory containing source files
+  lib.rs         # the main entry point for libraries and packages
+  main.rs        # the main entry point for projects producing executables
+  ▾ bin/         # (optional) directory containing additional executables
+    *.rs
+  ▾ */           # (optional) directories containing multi-file executables
+    main.rs
+▾ examples/      # (optional) examples
+  *.rs
+  ▾ */           # (optional) directories containing multi-file examples
+    main.rs
+▾ tests/         # (optional) integration tests
+  *.rs
+  ▾ */           # (optional) directories containing multi-file tests
+    main.rs
+▾ benches/       # (optional) benchmarks
+  *.rs
+  ▾ */           # (optional) directories containing multi-file benchmarks
+    main.rs
+```
+
+To structure your code after you've created the files and folders for your
+project, you should remember to use Rust's module system, which you can read
+about in [the book](https://doc.rust-lang.org/book/crates-and-modules.html).
+
+# Examples
+
+Files located under `examples` are example uses of the functionality provided by
+the library. When compiled, they are placed in the `target/examples` directory.
+
+They can compile either as executables (with a `main()` function) or libraries
+and pull in the library by using `extern crate <library-name>`. They are
+compiled when you run your tests to protect them from bitrotting.
+
+You can run individual executable examples with the command `cargo run --example
+<example-name>`.
+
+Specify `crate-type` to make an example be compiled as a library:
+
+```toml
+[[example]]
+name = "foo"
+crate-type = ["staticlib"]
+```
+
+You can build individual library examples with the command `cargo build
+--example <example-name>`.
+
+# Tests
+
+When you run `cargo test`, Cargo will:
+
+* compile and run your library’s unit tests, which are in the files reachable
+  from `lib.rs` (naturally, any sections marked with `#[cfg(test)]` will be
+  considered at this stage);
+* compile and run your library’s documentation tests, which are embedded inside
+  of documentation blocks;
+* compile and run your library’s [integration tests](#integration-tests); and
+* compile your library’s examples.
+
+## Integration tests
+
+Each file in `tests/*.rs` is an integration test. When you run `cargo test`,
+Cargo will compile each of these files as a separate crate. The crate can link
+to your library by using `extern crate <library-name>`, like any other code that
+depends on it.
+
+Cargo will not automatically compile files inside subdirectories of `tests`, but
+an integration test can import modules from these directories as usual. For
+example, if you want several integration tests to share some code, you can put
+the shared code in `tests/common/mod.rs` and then put `mod common;` in each of
+the test files.
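+
+As a minimal sketch, suppose the library crate is named `adder` and exposes an
+`add` function (both names are hypothetical). The shared helper lives in
+`tests/common/mod.rs`:
+
+```rust
+// tests/common/mod.rs: helpers shared by several integration tests.
+pub fn setup() {
+    // hypothetical shared initialization, e.g. creating fixture files
+}
+```
+
+An integration test in `tests/adds.rs` can then link the library and pull in
+the shared module:
+
+```rust
+// tests/adds.rs: compiled by Cargo as its own, separate crate.
+extern crate adder; // link to the library under test (hypothetical name)
+
+mod common; // includes tests/common/mod.rs
+
+#[test]
+fn adds_two() {
+    common::setup();
+    assert_eq!(adder::add(1, 2), 3);
+}
+```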
+
+# Configuring a target
+
+All of the `[[bin]]`, `[lib]`, `[[bench]]`, `[[test]]`, and `[[example]]`
+sections support similar configuration for specifying how a target should be
+built. The double-bracket sections like `[[bin]]` are TOML [arrays of
+tables](https://github.com/toml-lang/toml#array-of-tables), which means you can
+write more than one `[[bin]]` section to make several executables in your crate.
+
+The example below uses `[lib]`, but it applies to all other sections
+as well. All values listed are the defaults for that option unless otherwise
+specified.
+
+```toml
+[package]
+# ...
+
+[lib]
+# The name of a target is the name of the library that will be generated. This
+# defaults to the name of the package or project, with any dashes replaced
+# with underscores. (Rust `extern crate` declarations reference this name;
+# therefore the value must be a valid Rust identifier to be usable.)
+name = "foo"
+
+# This field points at where the crate is located, relative to the `Cargo.toml`.
+path = "src/lib.rs"
+
+# A flag for enabling unit tests for this target. This is used by `cargo test`.
+test = true
+
+# A flag for enabling documentation tests for this target. This is only relevant
+# for libraries; it has no effect on other sections. This is used by
+# `cargo test`.
+doctest = true
+
+# A flag for enabling benchmarks for this target. This is used by `cargo bench`.
+bench = true
+
+# A flag for enabling documentation of this target. This is used by `cargo doc`.
+doc = true
+
+# If the target is meant to be a compiler plugin, this field must be set to true
+# for Cargo to correctly compile it and make it available for all dependencies.
+plugin = false
+
+# If the target is meant to be a "macros 1.1" procedural macro, this field must
+# be set to true.
+proc-macro = false
+
+# If set to false, `cargo test` will omit the `--test` flag to rustc, which
+# stops it from generating a test harness. This is useful when the binary being
+# built manages the test runner itself.
+harness = true
+```
+
+## The `required-features` field (optional)
+
+The `required-features` field specifies which features the target needs in order
+to be built. If any of the required features are not selected, the target will
+be skipped. This is only relevant for the `[[bin]]`, `[[bench]]`, `[[test]]`,
+and `[[example]]` sections; it has no effect on `[lib]`.
+
+```toml
+[features]
+# ...
+postgres = []
+sqlite = []
+tools = []
+
+[[bin]]
+# ...
+required-features = ["postgres", "tools"]
+```
+
+# Building dynamic or static libraries
+
+If your project produces a library, you can specify which kind of library to
+build by explicitly listing the library in your `Cargo.toml`:
+
+```toml
+# ...
+
+[lib]
+name = "..."
+crate-type = ["dylib"] # could be `staticlib` as well
+```
+
+The available options are `dylib`, `rlib`, `staticlib`, `cdylib`, and
+`proc-macro`. You should only use this option in a top-level project; Cargo will
+always compile packages (dependencies) based on the requirements of the project
+that includes them.
+
+You can read more about the different crate types in the
+[Rust Reference Manual](https://doc.rust-lang.org/reference/linkage.html).
+
+# The `[patch]` Section
+
+This section of Cargo.toml can be used to [override dependencies][replace] with
+other copies.
The syntax is similar to the `[dependencies]` section: + +```toml +[patch.crates-io] +foo = { git = 'https://github.com/example/foo' } +bar = { path = 'my/local/bar' } +``` + +The `[patch]` table is made of dependency-like sub-tables. Each key after +`[patch]` is a URL of the source that's being patched, or `crates-io` if +you're modifying the https://crates.io registry. In the example above +`crates-io` could be replaced with a git URL such as +`https://github.com/rust-lang-nursery/log`. + +Each entry in these tables is a normal dependency specification, the same as +found in the `[dependencies]` section of the manifest. The dependencies listed +in the `[patch]` section are resolved and used to patch the source at the +URL specified. The above manifest snippet patches the `crates-io` source (e.g. +crates.io itself) with the `foo` crate and `bar` crate. + +Sources can be patched with versions of crates that do not exist, and they can +also be patched with versions of crates that already exist. If a source is +patched with a crate version that already exists in the source, then the +source's original crate is replaced. + +More information about overriding dependencies can be found in the [overriding +dependencies][replace] section of the documentation and [RFC 1969] for the +technical specification of this feature. Note that the `[patch]` feature will +first become available in Rust 1.21, set to be released on 2017-10-12. + +[RFC 1969]: https://github.com/rust-lang/rfcs/pull/1969 +[replace]: specifying-dependencies.html#overriding-dependencies + +# The `[replace]` Section + +This section of Cargo.toml can be used to [override dependencies][replace] with +other copies. The syntax is similar to the `[dependencies]` section: + +```toml +[replace] +"foo:0.1.0" = { git = 'https://github.com/example/foo' } +"bar:1.0.2" = { path = 'my/local/bar' } +``` + +Each key in the `[replace]` table is a [package id +specification](pkgid-spec.html) which allows arbitrarily choosing a node in the +dependency graph to override. The value of each key is the same as the +`[dependencies]` syntax for specifying dependencies, except that you can't +specify features. Note that when a crate is overridden the copy it's overridden +with must have both the same name and version, but it can come from a different +source (e.g. git or a local path). + +More information about overriding dependencies can be found in the [overriding +dependencies][replace] section of the documentation. diff --git a/collector/compile-benchmarks/cargo/src/doc/pkgid-spec.md b/collector/compile-benchmarks/cargo/src/doc/pkgid-spec.md new file mode 100644 index 000000000..a2d6a067e --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/pkgid-spec.md @@ -0,0 +1,44 @@ +% Package ID Specifications + +# Package ID specifications + +Subcommands of Cargo frequently need to refer to a particular package within a +dependency graph for various operations like updating, cleaning, building, etc. +To solve this problem, Cargo supports Package ID Specifications. A specification +is a string which is used to uniquely refer to one package within a graph of +packages. + +## Specification grammar + +The formal grammar for a Package Id Specification is: + +```notrust +pkgid := pkgname + | [ proto "://" ] hostname-and-path [ "#" ( pkgname | semver ) ] +pkgname := name [ ":" semver ] + +proto := "http" | "git" | ... +``` + +Here, brackets indicate that the contents are optional. 
+
+## Example specifications
+
+These could all be references to a package `foo` version `1.2.3` from the
+registry at `crates.io`:
+
+| pkgid | name | version | url |
+|:-----------------------------|:-----:|:-------:|:----------------------:|
+| `foo` | `foo` | `*` | `*` |
+| `foo:1.2.3` | `foo` | `1.2.3` | `*` |
+| `crates.io/foo` | `foo` | `*` | `*://crates.io/foo` |
+| `crates.io/foo#1.2.3` | `foo` | `1.2.3` | `*://crates.io/foo` |
+| `crates.io/bar#foo:1.2.3` | `foo` | `1.2.3` | `*://crates.io/bar` |
+| `http://crates.io/foo#1.2.3` | `foo` | `1.2.3` | `http://crates.io/foo` |
+
+## Brevity of specifications
+
+The goal of this design is to enable both succinct and exhaustive syntaxes for
+referring to packages in a dependency graph. Ambiguous references may refer to
+one or more packages. Most commands generate an error if more than one package
+could be referred to with the same specification.
diff --git a/collector/compile-benchmarks/cargo/src/doc/policies.html b/collector/compile-benchmarks/cargo/src/doc/policies.html
new file mode 100644
index 000000000..2a0146984
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/policies.html
@@ -0,0 +1,10 @@
+<html>
+<head>
+<meta http-equiv="refresh" content="0; url=https://crates.io/policies">
+</head>
+<body>
+
+ Redirecting to https://crates.io/policies...
+
+</body>
+</html>
diff --git a/collector/compile-benchmarks/cargo/src/doc/source-replacement.md b/collector/compile-benchmarks/cargo/src/doc/source-replacement.md
new file mode 100644
index 000000000..2222f27ba
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/source-replacement.md
@@ -0,0 +1,134 @@
+% Replacing sources
+
+Cargo supports the ability to **replace one source with another** to express
+strategies along the lines of mirrors or vendoring dependencies. Configuration
+is currently done through the [`.cargo/config` configuration][config] mechanism,
+like so:
+
+[config]: config.html
+
+```toml
+# The `source` table is where all keys related to source-replacement
+# are stored.
+[source]
+
+# Under the `source` table are a number of other tables whose keys are a
+# name for the relevant source. For example this section defines a new
+# source, called `my-awesome-source`, which comes from a directory
+# located at `vendor` relative to the directory containing this `.cargo/config`
+# file.
+[source.my-awesome-source]
+directory = "vendor"
+
+# The crates.io default source for crates is available under the name
+# "crates-io", and here we use the `replace-with` key to indicate that it's
+# replaced with our source above.
+[source.crates-io]
+replace-with = "my-awesome-source"
+```
+
+With this configuration, Cargo attempts to look up all crates in the `vendor`
+directory rather than querying the online registry at crates.io. Using source
+replacement, Cargo can express:
+
+* Vendoring - custom sources can be defined which represent crates on the local
+  filesystem. These sources are subsets of the source that they're replacing and
+  can be checked into projects if necessary.
+
+* Mirroring - sources can be replaced with an equivalent version which acts as a
+  cache for crates.io itself.
+
+Cargo makes a core assumption about source replacement: the source code must be
+exactly the same in both sources. In the example above, Cargo assumes that all
+of the crates coming from `my-awesome-source` are exactly the same as the copies
+from `crates-io`. Note that this also means that `my-awesome-source` is not
+allowed to have crates which are not present in the `crates-io` source.
+
+As a consequence, source replacement is not appropriate for situations such as
+patching a dependency or a private registry. Cargo supports patching
+dependencies through the usage of [the `[replace]` key][replace-section], and
+private registry support is planned for a future version of Cargo.
+
+[replace-section]: manifest.html#the-replace-section
+
+## Configuration
+
+Configuration of replacement sources is done through [`.cargo/config`][config]
+and the full set of available keys is:
+
+```toml
+# Each source has its own table where the key is the name of the source
+[source.the-source-name]
+
+# Indicate that `the-source-name` will be replaced with `another-source`,
+# defined elsewhere
+replace-with = "another-source"
+
+# Available kinds of sources that can be specified (described below)
+registry = "https://example.com/path/to/index"
+local-registry = "path/to/registry"
+directory = "path/to/vendor"
+
+# Git sources can optionally specify a branch/tag/rev as well
+git = "https://example.com/path/to/repo"
+# branch = "master"
+# tag = "v1.0.1"
+# rev = "313f44e8"
+```
+
+The `crates-io` source represents the crates.io online registry (the default
+source of crates) and can be replaced with:
+
+```toml
+[source.crates-io]
+replace-with = 'another-source'
+```
+
+## Registry Sources
+
+A "registry source" is one that is the same as crates.io itself. That is, it has
+an index served in a git repository which matches the format of the
+[crates.io index](https://github.com/rust-lang/crates.io-index). That repository
+then has configuration indicating where to download crates from.
+
+Currently there is not an already-available project for setting up a mirror of
+crates.io. Stay tuned though!
+
+## Local Registry Sources
+
+A "local registry source" is intended to be a subset of another registry
+source, but available on the local filesystem (aka vendoring). Local registries
+are downloaded ahead of time, typically synced with a `Cargo.lock`, and are
+made up of a set of `*.crate` files and an index, just like the normal registry.
+
+The primary way to manage and create local registry sources is through the
+[`cargo-local-registry`][cargo-local-registry] subcommand, which is available on
+crates.io and can be installed with `cargo install cargo-local-registry`.
+
+[cargo-local-registry]: https://crates.io/crates/cargo-local-registry
+
+Local registries are contained within one directory and contain a number of
+`*.crate` files downloaded from crates.io as well as an `index` directory with
+the same format as the crates.io-index project (populated with just entries for
+the crates that are present).
+
+## Directory Sources
+
+A "directory source" is similar to a local registry source in that it contains a
+number of crates available on the local filesystem, suitable for vendoring
+dependencies. Also like local registries, directory sources can primarily be
+managed by an external subcommand, [`cargo-vendor`][cargo-vendor], which can be
+installed with `cargo install cargo-vendor`.
+
+[cargo-vendor]: https://crates.io/crates/cargo-vendor
+
+Directory sources are distinct from local registries, though, in that they
+contain the unpacked version of `*.crate` files, making them more suitable in
+some situations for checking everything into source control. A directory source
+is just a directory containing a number of other directories which contain the
+source code for crates (the unpacked version of `*.crate` files). Currently no
+restriction is placed on the name of each directory.
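+
+As an illustration, a directory source used for vendoring might be laid out as
+follows (the crate names and versions shown are hypothetical):
+
+```notrust
+vendor/
+    libc-0.2.14/
+        Cargo.toml
+        src/
+        ...
+    log-0.3.8/
+        Cargo.toml
+        src/
+        ...
+```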
+
+Each crate in a directory source also has an associated metadata file indicating
+the checksum of each file in the crate to protect against accidental
+modifications.
diff --git a/collector/compile-benchmarks/cargo/src/doc/specifying-dependencies.md b/collector/compile-benchmarks/cargo/src/doc/specifying-dependencies.md
new file mode 100644
index 000000000..aa473769a
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/doc/specifying-dependencies.md
@@ -0,0 +1,525 @@
+% Specifying Dependencies
+
+Your crates can depend on other libraries from [crates.io], `git` repositories, or
+subdirectories on your local file system. You can also temporarily override the
+location of a dependency, for example, to be able to test out a bug fix in the
+dependency that you are working on locally. You can have different
+dependencies for different platforms, and dependencies that are only used during
+development. Let's take a look at how to do each of these.
+
+# Specifying dependencies from crates.io
+
+Cargo is configured to look for dependencies on [crates.io] by default. Only
+the name and a version string are required in this case. In [the cargo
+guide](guide.html), we specified a dependency on the `time` crate:
+
+```toml
+[dependencies]
+time = "0.1.12"
+```
+
+The string `"0.1.12"` is a [semver] version requirement. Since this
+string does not have any operators in it, it is interpreted the same way as
+if we had specified `"^0.1.12"`, which is called a caret requirement.
+
+[semver]: https://github.com/steveklabnik/semver#requirements
+
+## Caret requirements
+
+**Caret requirements** allow SemVer compatible updates to a specified version.
+An update is allowed if the new version number does not modify the left-most
+non-zero digit in the major, minor, patch grouping. In this case, if we ran
+`cargo update -p time`, cargo would update us to version `0.1.13` if it was
+available, but would not update us to `0.2.0`. If instead we had specified the
+version string as `^1.0`, cargo would update to `1.1` but not `2.0`. `0.0.x` is
+not considered compatible with any other version.
+
+Here are some more examples of caret requirements and the versions that would
+be allowed with them:
+
+```notrust
+^1.2.3 := >=1.2.3 <2.0.0
+^1.2 := >=1.2.0 <2.0.0
+^1 := >=1.0.0 <2.0.0
+^0.2.3 := >=0.2.3 <0.3.0
+^0.0.3 := >=0.0.3 <0.0.4
+^0.0 := >=0.0.0 <0.1.0
+^0 := >=0.0.0 <1.0.0
+```
+
+This compatibility convention is different from SemVer in the way it treats
+versions before 1.0.0. While SemVer says there is no compatibility before
+1.0.0, Cargo considers `0.x.y` to be compatible with `0.x.z`, where `y ≥ z`
+and `x > 0`.
+
+## Tilde requirements
+
+**Tilde requirements** specify a minimal version with some ability to update.
+If you specify a major, minor, and patch version or only a major and minor
+version, only patch-level changes are allowed. If you only specify a major
+version, then minor- and patch-level changes are allowed.
+
+`~1.2.3` is an example of a tilde requirement.
+
+```notrust
+~1.2.3 := >=1.2.3 <1.3.0
+~1.2 := >=1.2.0 <1.3.0
+~1 := >=1.0.0 <2.0.0
+```
+
+## Wildcard requirements
+
+**Wildcard requirements** allow for any version where the wildcard is
+positioned.
+
+`*`, `1.*` and `1.2.*` are examples of wildcard requirements.
+
+```notrust
+* := >=0.0.0
+1.* := >=1.0.0 <2.0.0
+1.2.* := >=1.2.0 <1.3.0
+```
+
+## Inequality requirements
+
+**Inequality requirements** allow manually specifying a version range or an
+exact version to depend on.
+
+Here are some examples of inequality requirements:
+
+```notrust
+>= 1.2.0
+> 1
+< 2
+= 1.2.3
+```
+
+## Multiple requirements
+
+Multiple version requirements can also be separated with a comma, e.g. `>= 1.2,
+< 1.5`.
+
+# Specifying dependencies from `git` repositories
+
+To depend on a library located in a `git` repository, the minimum information
+you need to specify is the location of the repository with the `git` key:
+
+```toml
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand" }
+```
+
+Cargo will fetch the `git` repository at this location and then look for a
+`Cargo.toml` for the requested crate anywhere inside the `git` repository
+(not necessarily at the root).
+
+Since we haven’t specified any other information, Cargo assumes that
+we intend to use the latest commit on the `master` branch to build our project.
+You can combine the `git` key with the `rev`, `tag`, or `branch` keys to
+specify something else. Here's an example of specifying that you want to use
+the latest commit on a branch named `next`:
+
+```toml
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand", branch = "next" }
+```
+
+# Specifying path dependencies
+
+Over time, our `hello_world` project from [the guide](guide.html) has grown
+significantly in size! It’s gotten to the point that we probably want to
+split out a separate crate for others to use. To do this, Cargo supports
+**path dependencies**, which are typically sub-crates that live within one
+repository. Let’s start off by making a new crate inside of our `hello_world`
+project:

+```shell
+# inside of hello_world/
+$ cargo new hello_utils
+```
+
+This will create a new folder `hello_utils`, inside of which a `Cargo.toml` and
+`src` folder are ready to be configured. In order to tell Cargo about this, open
+up `hello_world/Cargo.toml` and add `hello_utils` to your dependencies:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils" }
+```
+
+This tells Cargo that we depend on a crate called `hello_utils` which is found
+in the `hello_utils` folder (relative to the `Cargo.toml` it’s written in).
+
+And that’s it! The next `cargo build` will automatically build `hello_utils` and
+all of its own dependencies, and others can start using the crate as well.
+However, crates that use dependencies specified with only a path are not
+permitted on [crates.io]. If we wanted to publish our `hello_world` crate, we
+would need to publish a version of `hello_utils` to [crates.io](https://crates.io)
+and specify its version in the dependencies line as well:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils", version = "0.1.0" }
+```
+
+# Overriding dependencies
+
+There are a number of methods in Cargo to support overriding dependencies and
+otherwise controlling the dependency graph. These options are, however,
+typically only available at the workspace level and aren't propagated through
+dependencies. In other words, "applications" have the ability to override
+dependencies but "libraries" do not.
+
+The desire to override a dependency or otherwise alter some dependencies can
+arise through a number of scenarios. Most of them, however, boil down to the
+ability to work with a crate before it's been published to crates.io. For
+example:
+
+* A crate you're working on is also used in a much larger application you're
+  working on, and you'd like to test a bug fix to the library inside of the
+  larger application.
+* An upstream crate you don't work on has a new feature or a bug fix on the
+  master branch of its git repository which you'd like to test out.
+* You're about to publish a new major version of your crate, but you'd like to
+  do integration testing across an entire project to ensure the new major
+  version works.
+* You've submitted a fix to an upstream crate for a bug you found, but you'd
+  like to immediately have your application start depending on the fixed version
+  of the crate to avoid blocking on the bug fix getting merged.
+
+These scenarios are currently all solved with the [`[patch]` manifest
+section][patch-section]. (Note that the `[patch]` feature will first become
+available in Rust 1.21, set to be released on 2017-10-12.) Historically some of
+these scenarios have been solved with [the `[replace]` section][replace-section],
+but we'll document the `[patch]` section here.
+
+[patch-section]: manifest.html#the-patch-section
+[replace-section]: manifest.html#the-replace-section
+
+### Testing a bugfix
+
+Let's say you're working with the [`uuid`] crate but while you're working on it
+you discover a bug. You are, however, quite enterprising, so you decide to try
+to fix the bug as well! Your manifest will initially look like:
+
+[`uuid`]: https://crates.io/crates/uuid
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0"
+```
+
+The first thing we'll do is clone the [`uuid` repository][uuid-repository]
+locally via:
+
+```shell
+$ git clone https://github.com/rust-lang-nursery/uuid
+```
+
+Next we'll edit the manifest of `my-library` to contain:
+
+```toml
+[patch.crates-io]
+uuid = { path = "../path/to/uuid" }
+```
+
+Here we declare that we're *patching* the source `crates-io` with a new
+dependency. This will effectively add the locally checked out version of `uuid`
+to the crates.io registry for our local project.
+
+Next up we need to ensure that our lock file is updated to use this new version
+of `uuid` so our project uses the locally checked out copy instead of one from
+crates.io. The way `[patch]` works is that it'll load the dependency at
+`../path/to/uuid` and then whenever crates.io is queried for versions of `uuid`
+it'll *also* return the local version.
+
+This means that the version number of the local checkout is significant and will
+affect whether the patch is used. Our manifest declared `uuid = "1.0"`, which
+means we'll only resolve to `>= 1.0.0, < 2.0.0`, and Cargo's greedy resolution
+algorithm also means that we'll resolve to the maximum version within that
+range. Typically this doesn't matter, as the version of the git repository will
+already be greater than, or match, the maximum version published on crates.io,
+but it's important to keep this in mind!
+
+In any case, typically all you need to do now is:
+
+```shell
+$ cargo build
+   Compiling uuid v1.0.0 (file://.../uuid)
+   Compiling my-library v0.1.0 (file://.../my-library)
+    Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs
+```
+
+And that's it! You're now building with the local version of `uuid` (note the
+`file://` in the build output). If you don't see the `file://` version getting
+built, then you may need to run `cargo update -p uuid --precise $version` where
+`$version` is the version of the locally checked out copy of `uuid`.
+
+Once you've fixed the bug you originally found, you'll likely want to submit it
+as a pull request to the `uuid` crate itself.
+Once you've done this, you can update the `[patch]` section. The listing
+inside of `[patch]` is just like the `[dependencies]` section, so once your pull
+request is merged, you could change your `path` dependency to:
+
+```toml
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+[uuid-repository]: https://github.com/rust-lang-nursery/uuid
+
+### Working with an unpublished minor version
+
+Let's now shift gears a bit from bug fixes to adding features. While working on
+`my-library` you discover that a whole new feature is needed in the `uuid`
+crate. You've implemented this feature, tested it locally above with `[patch]`,
+and submitted a pull request. Let's go over how you continue to use and test it
+before it's actually published.
+
+Let's also say that the current version of `uuid` on crates.io is `1.0.0`, but
+since then the master branch of the git repository has updated to `1.0.1`. This
+branch includes the new feature you submitted previously. To use this
+repository we'll edit our `Cargo.toml` to look like:
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0.1"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Note that our local dependency on `uuid` has been updated to `1.0.1`, as that's
+what we'll actually require once the crate is published. This version doesn't
+exist on crates.io, though, so we provide it with the `[patch]` section of the
+manifest.
+
+Now when our library is built it'll fetch `uuid` from the git repository and
+resolve to 1.0.1 inside the repository instead of trying to download a version
+from crates.io. Once 1.0.1 is published on crates.io, the `[patch]` section can
+be deleted.
+
+It's also worth noting that `[patch]` applies *transitively*. Let's say you use
+`my-library` in a larger project, such as:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Remember that `[patch]` is only applicable at the *top level*, so consumers of
+`my-library` have to repeat the `[patch]` section if necessary. Here, though,
+the new `uuid` crate applies to *both* our dependency on `uuid` and the
+`my-library -> uuid` dependency. The `uuid` crate will be resolved to one
+version for this entire crate graph, 1.0.1, and it'll be pulled from the git
+repository.
+
+### Prepublishing a breaking change
+
+As a final scenario, let's take a look at working with a new major version of a
+crate, typically accompanied by breaking changes. Sticking with our previous
+crates, this means that we're going to be creating version 2.0.0 of the `uuid`
+crate. After we've submitted all changes upstream, we can update our manifest
+for `my-library` to look like:
+
+```toml
+[dependencies]
+uuid = "2.0"
+
+[patch.crates-io]
+uuid = { git = "https://github.com/rust-lang-nursery/uuid", branch = "2.0.0" }
+```
+
+And that's it! As in the previous example, the 2.0.0 version doesn't actually
+exist on crates.io, but we can still introduce it through a git dependency via
+the `[patch]` section.
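+
+If you'd like to double-check which version was actually selected, one option
+(a sketch, building on the package ID specifications described in
+pkgid-spec.html) is to query the fully qualified id of the patched crate:
+
+```shell
+# Expected to report a 2.0.0 package id coming from the git source rather
+# than from crates.io.
+$ cargo pkgid uuid
+```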
+
+As a thought exercise, let's take another look at the `my-binary` manifest from
+above:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid', version = '2.0.0' }
+```
+
+Note that this will actually resolve to two versions of the `uuid` crate. The
+`my-binary` crate will continue to use the 1.x.y series of the `uuid` crate but
+the `my-library` crate will use the 2.0.0 version of `uuid`. This will allow you
+to gradually roll out breaking changes to a crate through a dependency graph
+without being forced to update everything all at once.
+
+### Overriding with local dependencies
+
+Sometimes you're only temporarily working on a crate and you don't want to have
+to modify `Cargo.toml`, as with the `[patch]` section above. For this use
+case Cargo offers a much more limited version of overrides called **path
+overrides**.
+
+Path overrides are specified through `.cargo/config` instead of `Cargo.toml`,
+and you can find [more documentation about this configuration][config-docs].
+Inside of `.cargo/config` you'll specify a key called `paths`:
+
+[config-docs]: config.html
+
+```toml
+paths = ["/path/to/uuid"]
+```
+
+This array should be filled with directories that contain a `Cargo.toml`. In
+this instance, we’re just adding `uuid`, so it will be the only one that’s
+overridden. This path can be either absolute or relative to the directory that
+contains the `.cargo` folder.
+
+Path overrides are more restricted than the `[patch]` section, however, in
+that they cannot change the structure of the dependency graph. When a
+path replacement is used, the previous set of dependencies must all exactly
+match the new `Cargo.toml` specification. For example, this means that path
+overrides cannot be used to test out adding a dependency to a crate; `[patch]`
+must be used in that situation. As a result, usage of a path override is
+typically isolated to quick bug fixes rather than larger changes.
+
+Note: using a local configuration to override paths will only work for crates
+that have been published to [crates.io]. You cannot use this feature to tell
+Cargo how to find local unpublished crates.
+
+# Platform-specific dependencies
+
+Platform-specific dependencies take the same format, but are listed under a
+`target` section. Normally Rust-like `#[cfg]` syntax will be used to define
+these sections:
+
+```toml
+[target.'cfg(windows)'.dependencies]
+winhttp = "0.4.0"
+
+[target.'cfg(unix)'.dependencies]
+openssl = "1.0.1"
+
+[target.'cfg(target_arch = "x86")'.dependencies]
+native = { path = "native/i686" }
+
+[target.'cfg(target_arch = "x86_64")'.dependencies]
+native = { path = "native/x86_64" }
+```
+
+As with Rust, the syntax here supports the `not`, `any`, and `all` operators
+to combine various cfg name/value pairs. Note that the `cfg` syntax has only
+been available since Cargo 0.9.0 (Rust 1.8.0).
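+
+As a sketch of how these operators compose (the `some-unix-helper` crate named
+here is hypothetical), a dependency can be restricted to 32-bit Unix targets
+like so:
+
+```toml
+[target.'cfg(all(unix, target_pointer_width = "32"))'.dependencies]
+some-unix-helper = "0.1"
+```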
+
+In addition to `#[cfg]` syntax, Cargo also supports listing out the full target
+the dependencies would apply to:
+
+```toml
+[target.x86_64-pc-windows-gnu.dependencies]
+winhttp = "0.4.0"
+
+[target.i686-unknown-linux-gnu.dependencies]
+openssl = "1.0.1"
+```
+
+If you’re using a custom target specification, quote the full path and file
+name:
+
+```toml
+[target."x86_64/windows.json".dependencies]
+winhttp = "0.4.0"
+
+[target."i686/linux.json".dependencies]
+openssl = "1.0.1"
+native = { path = "native/i686" }
+
+[target."x86_64/linux.json".dependencies]
+openssl = "1.0.1"
+native = { path = "native/x86_64" }
+```
+
+# Development dependencies
+
+You can add a `[dev-dependencies]` section to your `Cargo.toml` whose format
+is equivalent to `[dependencies]`:
+
+```toml
+[dev-dependencies]
+tempdir = "0.3"
+```
+
+Dev-dependencies are not used when compiling a package for normal builds; they
+are used for compiling tests, examples, and benchmarks.
+
+These dependencies are *not* propagated to other packages which depend on this
+package.
+
+You can also have target-specific development dependencies by using
+`dev-dependencies` in the target section header instead of `dependencies`. For
+example:
+
+```toml
+[target.'cfg(unix)'.dev-dependencies]
+mio = "0.0.1"
+```
+
+[crates.io]: https://crates.io/
+
+# Build dependencies
+
+You can depend on other Cargo-based crates for use in your build scripts.
+Dependencies are declared through the `build-dependencies` section of the
+manifest:
+
+```toml
+[build-dependencies]
+gcc = "0.3"
+```
+
+The build script **does not** have access to the dependencies listed
+in the `dependencies` or `dev-dependencies` section. Build
+dependencies will likewise not be available to the package itself
+unless listed under the `dependencies` section as well. A package
+itself and its build script are built separately, so their
+dependencies need not coincide. Cargo is kept simpler and cleaner by
+using independent dependencies for independent purposes.
+
+# Choosing features
+
+If a package you depend on offers conditional features, you can
+specify which to use:
+
+```toml
+[dependencies.awesome]
+version = "1.3.5"
+default-features = false # do not include the default features, and optionally
+                         # cherry-pick individual features
+features = ["secure-password", "civet"]
+```
+
+More information about features can be found in the
+[manifest documentation](manifest.html#the-features-section).
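+
+Since dependency attributes can also be written as an inline TOML table, the
+`awesome` example above can equivalently be written as:
+
+```toml
+[dependencies]
+awesome = { version = "1.3.5", default-features = false, features = ["secure-password", "civet"] }
+```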
diff --git a/collector/compile-benchmarks/cargo/src/doc/stylesheets/all.css b/collector/compile-benchmarks/cargo/src/doc/stylesheets/all.css new file mode 100644 index 000000000..46b0343ea --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/stylesheets/all.css @@ -0,0 +1,291 @@ +html { + background: url("../images/noise.png"); + background-color: #3b6837; +} + +main, #header { width: 900px; } + +* { + box-sizing: border-box; +} + +body { + display: -webkit-flex; + display: flex; + -webkit-flex-direction: column; + flex-direction: column; + -webkit-align-items: center; + align-items: center; + font-family: sans-serif; + font-size: 16px; +} + +a { color: #00ac5b; text-decoration: none; } +a:hover { color: #00793f; } + +h1 { + font-size: 24px; + margin: 20px 0 10px 0; + font-weight: bold; + color: #b64790; +} + +h1 code:not(.highlight) { + color: #d9a700; + vertical-align: bottom; +} +h1 a, h2 a { color: #b64790; text-decoration: none; } +h1:hover a, h2:hover a { color: #A03D7E; } +h1:hover a:after, +h2:hover a:after { content: '\2002\00a7\2002'; } +:target { background: rgba(239, 242, 178, 1); padding: 5px; } + +h1.title { /* style rustdoc-generated title */ + width: 100%; + padding: 40px 20px 40px 60px; + background-color: #edebdd; + margin-bottom: 20px; + -webkit-border-radius: 5px; + -moz-border-radius: 5px; + -ms-border-radius: 5px; + border-radius: 5px; + margin: 0; + color: #383838; + font-size: 2em; + background-image: url(../images/circle-with-i.png); + background-repeat: no-repeat; + background-position: 20px center; +} + +h2 { + font-size: 18px; + margin: 15px 0 5px 0; + color: #b64790; + font-weight: bold; +} + +h2 code:not(.highlight) { color: #d9a700; } + +code:not(.highlight) { + font-family: monospace; + color: #b64790; +} + +main { + display: -webkit-flex; + display: flex; + -webkit-flex-direction: column; + flex-direction: column; + + width: 100%; + max-width: 900px; + margin-bottom: 10px; + + background-color: #f9f7ec; + padding: 15px; + + -webkit-border-radius: 10px; + -moz-border-radius: 10px; + -ms-border-radius: 10px; + border-radius: 10px; + box-shadow: 0px 0px 5px 2px #3b6837; + border: 5px solid #62865f; + color: #383838; +} + +main > p:first-child { + font-weight: 500; + margin-top: 3px; + padding-bottom: 15px; + border-bottom: 1px solid #62865f; + text-align: center; +} + +main p:first-child a { color: #3b6837; } +main p:first-child a:hover { color: #62865f; } + +main p, main ul { + /* color: #3b6837; */ + margin: 10px 0; + line-height: 150%; +} + +main ul { margin-left: 20px; } +main li { list-style-type: disc; } +main strong { font-weight: bold; } + +img.logo { + align-self: center; + margin-bottom: 10px; +} + +pre { + padding: 10px; + margin: 10px 0; + /* border: 1px solid #cad0d0; */ + border-radius: 4px; + max-width: calc(100vw - 45px); + overflow-x: auto; + + background: #383838 !important; + color: white; + padding: 20px; + + /* override prism.js styles */ + font-size: 1em !important; + border: none !important; + box-shadow: none !important; + text-shadow: none !important; +} + +pre code { + text-shadow: none !important; +} + +footer { + padding: 40px; + width: 900px; +} +footer a { + color: white; +} +footer a:hover { + color: #e6e6e6; +} +footer .sep, #header .sep { color: #284725; } +footer .sep { margin: 0 10px; } +#header .sep { margin-left: 10px; } + +.headerlink { + display: none; + text-decoration: none; +} +.fork-me { + position:absolute; + top:0; + right:0; +} + +.token.toml-section { color: #CB4B16; } +.token.toml-key { color: #268BD2; 
} + +/* Rust code highlighting */ +pre.rust .kw { color: #8959A8; } +pre.rust .kw-2, pre.rust .prelude-ty { color: #4271AE; } +pre.rust .number, pre.rust .string { color: #718C00; } +pre.rust .self, pre.rust .boolval, pre.rust .prelude-val, +pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; } +pre.rust .comment { color: #8E908C; } +pre.rust .doccomment { color: #4D4D4C; } +pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; } +pre.rust .lifetime { color: #B76514; } +code span.s1 { color: #2AA198; } + +table th { border-bottom: 1px solid black; } +table td, table th { padding: 5px 10px; } + +#header { + color: white; + position: relative; + height: 100px; + display: -webkit-flex; + display: flex; + -webkit-align-items: center; + align-items: center; +} +#header h1 { font-size: 2em; } +#header a, #header h1 { color: white; text-decoration: none; } +#header a:hover { color: #d9d9d9; } + +#header input.search { + border: none; + color: black; + outline: 0; + margin-left: 30px; + padding: 5px 5px 5px 25px; + background-image: url(../images/search.png); + background-repeat: no-repeat; + background-position: 6px 6px; + -webkit-border-radius: 15px; + -moz-border-radius: 15px; + -ms-border-radius: 15px; + border-radius: 15px; +} + +#header .nav { + -webkit-flex-grow: 2; + flex-grow: 2; + text-align: right; +} + +button.dropdown, a.dropdown { cursor: pointer; } +button.dropdown .arrow, a.dropdown .arrow { + font-size: 50%; display: inline-block; vertical-align: middle; +} +button.dropdown .arrow::after, a.dropdown .arrow::after { content: "▼"; } +button.active.dropdown .arrow::after, a.active.dropdown .arrow::after { + content: "▲"; +} + +button { + background: none; + outline: 0; + border: 0; + padding: 10px; + color: white; +} + +button.active { + background:#2a4f27; + box-shadow:inset -2px 2px 4px 0 #243d26 +} + +ul.dropdown { + display: none; + visibility: none; + position: absolute; + top: 100%; + right: 0; + width: 100%; + min-width: 150px; + opacity: 0; + margin: 0; + text-align: left; + padding: 0; + background: white; + border: 1px solid #d5d3cb; + list-style: none; + z-index: 10; + -webkit-border-radius: 5px; + -moz-border-radius: 5px; + -ms-border-radius: 5px; + border-radius: 5px; +} + +ul.dropdown li a { + font-size: 90%; + width: 100%; + display: inline-block; + padding: 8px 10px; + text-decoration: none; + color: #383838 !important; +} + +ul.dropdown li a:hover { + background: #5e5e5e; + color: white !important; +} +ul.dropdown li.last { border-top: 1px solid #d5d3cb; } +ul.dropdown.open { + display: block; + visibility: visible; + opacity: 1; +} +.dropdown-container { + display: inline-block; + position: relative; +} + +p > img { + max-width: 100%; +} diff --git a/collector/compile-benchmarks/cargo/src/doc/stylesheets/normalize.css b/collector/compile-benchmarks/cargo/src/doc/stylesheets/normalize.css new file mode 100644 index 000000000..73abb76fa --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/stylesheets/normalize.css @@ -0,0 +1,375 @@ +/*! normalize.css v2.0.1 | MIT License | git.io/normalize */ + +/* ========================================================================== + HTML5 display definitions + ========================================================================== */ + +/* + * Corrects `block` display not defined in IE 8/9. + */ + +article, +aside, +details, +figcaption, +figure, +footer, +header, +hgroup, +nav, +section, +summary { + display: block; +} + +/* + * Corrects `inline-block` display not defined in IE 8/9. 
+ */ + +audio, +canvas, +video { + display: inline-block; +} + +/* + * Prevents modern browsers from displaying `audio` without controls. + * Remove excess height in iOS 5 devices. + */ + +audio:not([controls]) { + display: none; + height: 0; +} + +/* + * Addresses styling for `hidden` attribute not present in IE 8/9. + */ + +[hidden] { + display: none; +} + +/* ========================================================================== + Base + ========================================================================== */ + +/* + * 1. Sets default font family to sans-serif. + * 2. Prevents iOS text size adjust after orientation change, without disabling + * user zoom. + */ + +html { + font-family: sans-serif; /* 1 */ + -webkit-text-size-adjust: 100%; /* 2 */ + -ms-text-size-adjust: 100%; /* 2 */ +} + +/* + * Removes default margin. + */ + +body { + margin: 0; +} + +/* ========================================================================== + Links + ========================================================================== */ + +/* + * Addresses `outline` inconsistency between Chrome and other browsers. + */ + +a:focus { + outline: thin dotted; +} + +/* + * Improves readability when focused and also mouse hovered in all browsers. + */ + +a:active, +a:hover { + outline: 0; +} + +/* ========================================================================== + Typography + ========================================================================== */ + +/* + * Addresses `h1` font sizes within `section` and `article` in Firefox 4+, + * Safari 5, and Chrome. + */ + +h1 { + font-size: 2em; +} + +/* + * Addresses styling not present in IE 8/9, Safari 5, and Chrome. + */ + +abbr[title] { + border-bottom: 1px dotted; +} + +/* + * Addresses style set to `bolder` in Firefox 4+, Safari 5, and Chrome. + */ + +b, +strong { + font-weight: bold; +} + +/* + * Addresses styling not present in Safari 5 and Chrome. + */ + +dfn { + font-style: italic; +} + +/* + * Addresses styling not present in IE 8/9. + */ + +mark { + background: #ff0; + color: #000; +} + + +/* + * Corrects font family set oddly in Safari 5 and Chrome. + */ + +code, +kbd, +pre, +samp { + font-family: monospace, serif; + font-size: 1em; +} + +/* + * Improves readability of pre-formatted text in all browsers. + */ + +pre { + white-space: pre; + white-space: pre-wrap; + word-wrap: break-word; +} + +/* + * Sets consistent quote types. + */ + +q { + quotes: "\201C" "\201D" "\2018" "\2019"; +} + +/* + * Addresses inconsistent and variable font size in all browsers. + */ + +small { + font-size: 80%; +} + +/* + * Prevents `sub` and `sup` affecting `line-height` in all browsers. + */ + +sub, +sup { + font-size: 75%; + line-height: 0; + position: relative; + vertical-align: baseline; +} + +sup { + top: -0.5em; +} + +sub { + bottom: -0.25em; +} + +/* ========================================================================== + Embedded content + ========================================================================== */ + +/* + * Removes border when inside `a` element in IE 8/9. + */ + +img { + border: 0; +} + +/* + * Corrects overflow displayed oddly in IE 9. + */ + +svg:not(:root) { + overflow: hidden; +} + +/* ========================================================================== + Figures + ========================================================================== */ + +/* + * Addresses margin not present in IE 8/9 and Safari 5. 
+ */ + +figure { + margin: 0; +} + +/* ========================================================================== + Forms + ========================================================================== */ + +/* + * Define consistent border, margin, and padding. + */ + +fieldset { + border: 1px solid #c0c0c0; + margin: 0 2px; + padding: 0.35em 0.625em 0.75em; +} + +/* + * 1. Corrects color not being inherited in IE 8/9. + * 2. Remove padding so people aren't caught out if they zero out fieldsets. + */ + +legend { + border: 0; /* 1 */ + padding: 0; /* 2 */ +} + +/* + * 1. Corrects font family not being inherited in all browsers. + * 2. Corrects font size not being inherited in all browsers. + * 3. Addresses margins set differently in Firefox 4+, Safari 5, and Chrome + */ + +button, +input, +select, +textarea { + font-family: inherit; /* 1 */ + font-size: 100%; /* 2 */ + margin: 0; /* 3 */ +} + +/* + * Addresses Firefox 4+ setting `line-height` on `input` using `!important` in + * the UA stylesheet. + */ + +button, +input { + line-height: normal; +} + +/* + * 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` + * and `video` controls. + * 2. Corrects inability to style clickable `input` types in iOS. + * 3. Improves usability and consistency of cursor style between image-type + * `input` and others. + */ + +button, +html input[type="button"], /* 1 */ +input[type="reset"], +input[type="submit"] { + -webkit-appearance: button; /* 2 */ + cursor: pointer; /* 3 */ +} + +/* + * Re-set default cursor for disabled elements. + */ + +button[disabled], +input[disabled] { + cursor: default; +} + +/* + * 1. Addresses box sizing set to `content-box` in IE 8/9. + * 2. Removes excess padding in IE 8/9. + */ + +input[type="checkbox"], +input[type="radio"] { + box-sizing: border-box; /* 1 */ + padding: 0; /* 2 */ +} + +/* + * 1. Addresses `appearance` set to `searchfield` in Safari 5 and Chrome. + * 2. Addresses `box-sizing` set to `border-box` in Safari 5 and Chrome + * (include `-moz` to future-proof). + */ + +input[type="search"] { + -webkit-appearance: textfield; /* 1 */ + -moz-box-sizing: content-box; + -webkit-box-sizing: content-box; /* 2 */ + box-sizing: content-box; +} + +/* + * Removes inner padding and search cancel button in Safari 5 and Chrome + * on OS X. + */ + +input[type="search"]::-webkit-search-cancel-button, +input[type="search"]::-webkit-search-decoration { + -webkit-appearance: none; +} + +/* + * Removes inner padding and border in Firefox 4+. + */ + +button::-moz-focus-inner, +input::-moz-focus-inner { + border: 0; + padding: 0; +} + +/* + * 1. Removes default vertical scrollbar in IE 8/9. + * 2. Improves readability and alignment in all browsers. + */ + +textarea { + overflow: auto; /* 1 */ + vertical-align: top; /* 2 */ +} + +/* ========================================================================== + Tables + ========================================================================== */ + +/* + * Remove most spacing between table cells. 
+ */ + +table { + border-collapse: collapse; + border-spacing: 0; +} \ No newline at end of file diff --git a/collector/compile-benchmarks/cargo/src/doc/stylesheets/prism.css b/collector/compile-benchmarks/cargo/src/doc/stylesheets/prism.css new file mode 100644 index 000000000..d80a94102 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/doc/stylesheets/prism.css @@ -0,0 +1,197 @@ +/* http://prismjs.com/download.html?themes=prism-twilight&languages=markup+css+clike+javascript */ +/** + * prism.js Twilight theme + * Based (more or less) on the Twilight theme originally of Textmate fame. + * @author Remy Bach + */ +code[class*="language-"], +pre[class*="language-"] { + color: white; + direction: ltr; + font-family: Consolas, Monaco, 'Andale Mono', monospace; + text-align: left; + text-shadow: 0 -.1em .2em black; + white-space: pre; + word-spacing: normal; + word-break: normal; + line-height: 1.5; + + -moz-tab-size: 4; + -o-tab-size: 4; + tab-size: 4; + + -webkit-hyphens: none; + -moz-hyphens: none; + -ms-hyphens: none; + hyphens: none; +} + +pre[class*="language-"], +:not(pre) > code[class*="language-"] { + background: hsl(0, 0%, 8%); /* #141414 */ +} + +/* Code blocks */ +pre[class*="language-"] { + border-radius: .5em; + border: .3em solid hsl(0, 0%, 33%); /* #282A2B */ + box-shadow: 1px 1px .5em black inset; + margin: .5em 0; + overflow: auto; + padding: 1em; +} + +pre[class*="language-"]::selection { + /* Safari */ + background: hsl(200, 4%, 16%); /* #282A2B */ +} + +pre[class*="language-"]::selection { + /* Firefox */ + background: hsl(200, 4%, 16%); /* #282A2B */ +} + +/* Text Selection colour */ +pre[class*="language-"]::-moz-selection, pre[class*="language-"] ::-moz-selection, +code[class*="language-"]::-moz-selection, code[class*="language-"] ::-moz-selection { + text-shadow: none; + background: hsla(0, 0%, 93%, 0.15); /* #EDEDED */ +} + +pre[class*="language-"]::selection, pre[class*="language-"] ::selection, +code[class*="language-"]::selection, code[class*="language-"] ::selection { + text-shadow: none; + background: hsla(0, 0%, 93%, 0.15); /* #EDEDED */ +} + +/* Inline code */ +:not(pre) > code[class*="language-"] { + border-radius: .3em; + border: .13em solid hsl(0, 0%, 33%); /* #545454 */ + box-shadow: 1px 1px .3em -.1em black inset; + padding: .15em .2em .05em; +} + +.token.comment, +.token.prolog, +.token.doctype, +.token.cdata { + color: hsl(0, 0%, 47%); /* #777777 */ +} + +.token.punctuation { + opacity: .7; +} + +.namespace { + opacity: .7; +} + +.token.tag, +.token.boolean, +.token.number, +.token.deleted { + color: hsl(14, 58%, 55%); /* #CF6A4C */ +} + +.token.keyword, +.token.property, +.token.selector, +.token.constant, +.token.symbol, +.token.builtin { + color: hsl(53, 89%, 79%); /* #F9EE98 */ +} + +.token.attr-name, +.token.attr-value, +.token.string, +.token.char, +.token.operator, +.token.entity, +.token.url, +.language-css .token.string, +.style .token.string, +.token.variable, +.token.inserted { + color: hsl(76, 21%, 52%); /* #8F9D6A */ +} + +.token.atrule { + color: hsl(218, 22%, 55%); /* #7587A6 */ +} + +.token.regex, +.token.important { + color: hsl(42, 75%, 65%); /* #E9C062 */ +} + +.token.important { + font-weight: bold; +} + +.token.entity { + cursor: help; +} + +pre[data-line] { + padding: 1em 0 1em 3em; + position: relative; +} + +/* Markup */ +.language-markup .token.tag, +.language-markup .token.attr-name, +.language-markup .token.punctuation { + color: hsl(33, 33%, 52%); /* #AC885B */ +} + +/* Make the tokens sit above the line highlight so 
the colours don't look faded. */ +.token { + position: relative; + z-index: 1; +} + +.line-highlight { + background: -moz-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ + background: -o-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ + background: -webkit-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ + background: hsla(0, 0%, 33%, 0.25); /* #545454 */ + background: linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ + border-bottom: 1px dashed hsl(0, 0%, 33%); /* #545454 */ + border-top: 1px dashed hsl(0, 0%, 33%); /* #545454 */ + left: 0; + line-height: inherit; + margin-top: 0.75em; /* Same as .prism’s padding-top */ + padding: inherit 0; + pointer-events: none; + position: absolute; + right: 0; + white-space: pre; + z-index: 0; +} + +.line-highlight:before, +.line-highlight[data-end]:after { + background-color: hsl(215, 15%, 59%); /* #8794A6 */ + border-radius: 999px; + box-shadow: 0 1px white; + color: hsl(24, 20%, 95%); /* #F5F2F0 */ + content: attr(data-start); + font: bold 65%/1.5 sans-serif; + left: .6em; + min-width: 1em; + padding: 0 .5em; + position: absolute; + text-align: center; + text-shadow: none; + top: .4em; + vertical-align: .3em; +} + +.line-highlight[data-end]:after { + bottom: .4em; + content: attr(data-end); + top: auto; +} + diff --git a/collector/compile-benchmarks/cargo/src/etc/_cargo b/collector/compile-benchmarks/cargo/src/etc/_cargo new file mode 100644 index 000000000..253e1fc54 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/_cargo @@ -0,0 +1,544 @@ +#compdef cargo + +autoload -U regexp-replace + +zstyle -T ':completion:*:*:cargo:*' tag-order && \ + zstyle ':completion:*:*:cargo:*' tag-order 'common-commands' + +_cargo() { +local context state state_descr line +typeset -A opt_args + +# leading items in parentheses are an exclusion list for the arguments following that arg +# See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions +# - => exclude all other options +# 1 => exclude positional arg 1 +# * => exclude all other args +# +blah => exclude +blah +_arguments \ + '(- 1 *)'{-h,--help}'[show help message]' \ + '(- 1 *)--list[list installed commands]' \ + '(- 1 *)'{-V,--version}'[show version information]' \ + {-v,--verbose}'[use verbose output]' \ + --color'[colorization option]' \ + '(+beta +nightly)+stable[use the stable toolchain]' \ + '(+stable +nightly)+beta[use the beta toolchain]' \ + '(+stable +beta)+nightly[use the nightly toolchain]' \ + '1: :->command' \ + '*:: :->args' + +case $state in + command) + _alternative 'common-commands:common:_cargo_cmds' 'all-commands:all:_cargo_all_cmds' + ;; + + args) + case $words[1] in + bench) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not build the default features]' \ + '--no-run[compile but do not run]' \ + '(-p,--package)'{-p=,--package=}'[package to run benchmarks for]:packages:_get_package_names' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + 
build) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not build the default features]' \ + '(-p,--package)'{-p=,--package=}'[package to build]:packages:_get_package_names' \ + '--release=[build in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + check) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not check the default features]' \ + '(-p,--package)'{-p=,--package=}'[package to check]:packages:_get_package_names' \ + '--release=[check in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + clean) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-p,--package)'{-p=,--package=}'[package to clean]:packages:_get_package_names' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[whether or not to clean release artifacts]' \ + '--target=[target triple(default:all)]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + doc) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-deps[do not build docs for dependencies]' \ + '--no-default-features[do not build the default features]' \ + '--open[open docs in browser after the build]' \ + '(-p, --package)'{-p,--package}'=[package to document]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[build for the target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + fetch) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + generate-lockfile) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + git-checkout) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + 'q(-q, 
--quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--reference=[REF]' \ + '--url=[URL]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + help) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '*: :_cargo_cmds' \ + ;; + + init) + _arguments \ + '--bin[use binary template]' \ + '--vcs:initialize a new repo with a given VCS:(git hg none)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--name=[set the resulting package name]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + install) + _arguments \ + '--bin=[only install the specified binary]' \ + '--branch=[branch to use when installing from git]' \ + '--color=:colorization option:(auto always never)' \ + '--debug[build in debug mode instead of release mode]' \ + '--example[install the specified example instead of binaries]' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '--git=[URL from which to install the crate]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--no-default-features[do not build the default features]' \ + '--path=[local filesystem path to crate to install]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--rev=[specific commit to use when installing from git]' \ + '--root=[directory to install packages into]: :_files -/' \ + '--tag=[tag to use when installing from git]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--vers=[version to install from crates.io]' \ + ;; + + locate-project) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + ;; + + login) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[Host to set the token for]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + metadata) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "--no-deps[output information only about the root package and don't fetch dependencies]" \ + '--no-default-features[do not include the default feature]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '--format-version=[format version(default: 1)]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + new) + _arguments \ + '--bin[use binary template]' \ + '--vcs:initialize a new repo with a given VCS:(git hg none)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--name=[set the resulting package name]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + owner) + _arguments \ + '(-a, --add)'{-a,--add}'[add owner LOGIN]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--index[registry index]' \ + '(-l, --list)'{-l,--list}'[list owners of a crate]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-r, 
--remove)'{-r,--remove}'[remove owner LOGIN]' \ + '--token[API token to use when authenticating]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + package) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-l, --list)'{-l,--list}'[print files included in a package without making one]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-metadata[ignore warnings about a lack of human-usable metadata]' \ + '--no-verify[do not build to verify contents]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + pkgid) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + publish) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[Host to set the token for]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-verify[Do not verify tarball until before publish]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--token[token to use when uploading]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + read-manifest) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + run) + _arguments \ + '--example=[name of the bin target]' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--bin=[name of the bin target]' \ + '--no-default-features[do not build the default features]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release=[build in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + '*: :_normal' \ + ;; + + rustc) + _arguments \ + '--color=:colorization option:(auto always never)' \ + '--features=[features to compile for the package]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'=[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to the manifest to fetch dependencies for]: :_files -/' \ + '--no-default-features[do not compile default features for the package]' \ + '(-p, --package)'{-p,--package}'=[profile to compile for]' \ + '--profile=[profile to build the selected target for]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[target triple which compiles will be for]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "${command_scope_spec[@]}" \ + ;; + + rustdoc) + _arguments \ + '--color=:colorization option:(auto always never)' \ + '--features=[space-separated list of features to also build]' \ + '--all-features[enable all 
available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'=[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to the manifest to document]: :_files -/' \ + '--no-default-features[do not build the `default` feature]' \ + '--open[open the docs in a browser after the operation]' \ + '(-p, --package)'{-p,--package}'=[package to document]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[build for the target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "${command_scope_spec[@]}" \ + ;; + + search) + _arguments \ + '--color=:colorization option:(auto always never)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[host of a registry to search in]' \ + '--limit=[limit the number of results]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + ;; + + test) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--test=[test name]: :_test_names' \ + '--no-default-features[do not build the default features]' \ + '--no-fail-fast[run all tests regardless of failure]' \ + '--no-run[compile but do not run]' \ + '(-p,--package)'{-p=,--package=}'[package to run tests for]:packages:_get_package_names' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + '1: :_test_names' \ + '(--doc --bin --example --test --bench)--lib[only test library]' \ + '(--lib --bin --example --test --bench)--doc[only test documentation]' \ + '(--lib --doc --example --test --bench)--bin=[binary name]' \ + '(--lib --doc --bin --test --bench)--example=[example name]' \ + '(--lib --doc --bin --example --bench)--test=[test name]' \ + '(--lib --doc --bin --example --test)--bench=[benchmark name]' \ + '--message-format:error format:(human json)' \ + '--frozen[require lock and cache up to date]' \ + '--locked[require lock up to date]' + ;; + + uninstall) + _arguments \ + '--bin=[only uninstall the binary NAME]' \ + '--color=:colorization option:(auto always never)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-q, --quiet)'{-q,--quiet}'[less output printed to stdout]' \ + '--root=[directory to uninstall packages from]: :_files -/' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + ;; + + update) + _arguments \ + '--aggressive[force dependency update]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-p,--package)'{-p=,--package=}'[package to update]:packages:_get_package_names' \ + '--precise=[update single dependency to PRECISE]: :' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + verify-project) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + 
'(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + version) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + yank) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--index[registry index]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--token[API token to use when authenticating]' \ + '--undo[undo a yank, putting a version back into the index]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + '--vers[version to yank or un-yank]' \ + ;; + esac + ;; +esac +} + +_cargo_cmds(){ +local -a commands;commands=( +'bench:execute all benchmarks of a local package' +'build:compile the current project' +'check:check the current project without compiling' +'clean:remove generated artifacts' +'doc:build package documentation' +'fetch:fetch package dependencies' +'generate-lockfile:create lockfile' +'git-checkout:git checkout' +'help:get help for commands' +'init:create new project in current directory' +'install:install a Rust binary' +'locate-project:print "Cargo.toml" location' +'login:login to remote server' +'metadata:print the metadata for a project in JSON' +'new:create a new project' +'owner:manage the owners of a crate on the registry' +'package:assemble local package into a distributable tarball' +'pkgid:print a fully qualified package specification' +'publish:upload package to the registry' +'read-manifest:print manifest in JSON format' +'run:run the main binary of the local package' +'rustc:compile a package and all of its dependencies' +'rustdoc:build documentation for a package' +'search:search packages on crates.io' +'test:execute all unit and integration tests of a local package' +'uninstall:remove a Rust binary' +'update:update dependencies' +'verify-project:check Cargo.toml' +'version:show version information' +'yank:remove a pushed crate from the index' +) +_describe -t common-commands 'common commands' commands +} + +_cargo_all_cmds(){ +local -a commands;commands=($(cargo --list)) +_describe -t all-commands 'all commands' commands +} + + +#FIXME: Disabled until fixed +#gets package names from the manifest file +_get_package_names() +{ +} + +#TODO: see if it makes sense for 'locate-project' to have non-json output.
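+# Illustrative sketch (comments only, nothing below this note is executed):
+# the helpers that follow assume the single-object JSON that this era's
+# `cargo locate-project` printed, e.g.
+#
+#   $ cargo locate-project
+#   {"root":"/home/user/project/Cargo.toml"}
+#
+# _locate_manifest strips the {"root":"..."} wrapper to leave the bare path,
+# and _get_names_from_array then scans that manifest for blocks such as
+#
+#   [[test]]
+#   name = "integration"
+#
+# collecting each `name` value as a completion candidate. The path and the
+# "integration" name are made-up placeholders, not taken from a real project.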
+#strips package name from json stuff +_locate_manifest(){ +local manifest=`cargo locate-project 2>/dev/null` +regexp-replace manifest '\{"root":"|"\}' '' +echo $manifest +} + +# Extracts the values of "name" from the array given in $1 and shows them as +# command line options for completion +_get_names_from_array() +{ + local -a filelist; + local manifest=$(_locate_manifest) + if [[ -z $manifest ]]; then + return 0 + fi + + local last_line + local -a names; + local in_block=false + local block_name=$1 + names=() + while read line + do + if [[ $last_line == "[[$block_name]]" ]]; then + in_block=true + else + if [[ $last_line =~ '.*\[\[.*' ]]; then + in_block=false + fi + fi + + if [[ $in_block == true ]]; then + if [[ $line =~ '.*name.*=' ]]; then + regexp-replace line '^.*name *= *|"' "" + names+=$line + fi + fi + + last_line=$line + done < $manifest + _describe $block_name names + +} + +#Gets the test names from the manifest file +_test_names() +{ + _get_names_from_array "test" +} + +#Gets the bench names from the manifest file +_benchmark_names() +{ + _get_names_from_array "bench" +} + +# These flags are mutually exclusive specifiers for the scope of a command; as +# they are used in multiple places without change, they are expanded into the +# appropriate command's `_arguments` where appropriate. +set command_scope_spec +command_scope_spec=( + '(--bin --example --test --lib)--bench=[benchmark name]: :_benchmark_names' + '(--bench --bin --test --lib)--example=[example name]' + '(--bench --example --test --lib)--bin=[binary name]' + '(--bench --bin --example --test)--lib=[library name]' + '(--bench --bin --example --lib)--test=[test name]' +) + +_cargo diff --git a/collector/compile-benchmarks/cargo/src/etc/cargo.bashcomp.sh b/collector/compile-benchmarks/cargo/src/etc/cargo.bashcomp.sh new file mode 100644 index 000000000..b68488dd4 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/cargo.bashcomp.sh @@ -0,0 +1,211 @@ +command -v cargo >/dev/null 2>&1 && +_cargo() +{ + local cur prev words cword cmd + _get_comp_words_by_ref cur prev words cword + + COMPREPLY=() + + cmd=${words[1]} + + local vcs='git hg none' + local color='auto always never' + local msg_format='human json' + + local opt_help='-h --help' + local opt_verbose='-v --verbose' + local opt_quiet='-q --quiet' + local opt_color='--color' + local opt_common="$opt_help $opt_verbose $opt_quiet $opt_color" + local opt_pkg='-p --package' + local opt_feat='--features --all-features --no-default-features' + local opt_mani='--manifest-path' + local opt_jobs='-j --jobs' + local opt_force='-f --force' + local opt_test='--test --bench' + local opt_lock='--frozen --locked' + + local opt___nocmd="$opt_common -V --version --list" + local opt__bench="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --no-run" + local opt__build="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release" + local opt__check="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release" + local opt__clean="$opt_common $opt_pkg $opt_mani $opt_lock --target --release" + local opt__doc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --bin --lib --target --open --no-deps --release" + local opt__fetch="$opt_common $opt_mani $opt_lock" + local opt__generate_lockfile="${opt__fetch}" + local opt__git_checkout="$opt_common 
$opt_lock --reference --url" + local opt__help="$opt_help" + local opt__init="$opt_common $opt_lock --bin --lib --name --vcs" + local opt__install="$opt_common $opt_feat $opt_jobs $opt_lock $opt_force --bin --branch --debug --example --git --list --path --rev --root --tag --vers" + local opt__locate_project="$opt_mani -h --help" + local opt__login="$opt_common $opt_lock --host" + local opt__metadata="$opt_common $opt_feat $opt_mani $opt_lock --format-version --no-deps" + local opt__new="$opt_common $opt_lock --vcs --bin --lib --name" + local opt__owner="$opt_common $opt_lock -a --add -r --remove -l --list --index --token" + local opt__package="$opt_common $opt_mani $opt_lock $opt_jobs --allow-dirty -l --list --no-verify --no-metadata" + local opt__pkgid="${opt__fetch} $opt_pkg" + local opt__publish="$opt_common $opt_mani $opt_lock $opt_jobs --allow-dirty --dry-run --host --token --no-verify" + local opt__read_manifest="$opt_help $opt_verbose $opt_mani $opt_color --no-deps" + local opt__run="$opt_common $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --target --bin --example --release" + local opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --profile --target --lib --bin --example --release" + local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release --open" + local opt__search="$opt_common $opt_lock --host --limit" + local opt__test="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --all --doc --target --lib --bin --example --no-run --release --no-fail-fast" + local opt__uninstall="$opt_common $opt_lock --bin --root" + local opt__update="$opt_common $opt_pkg $opt_mani $opt_lock --aggressive --precise" + local opt__verify_project="${opt__fetch}" + local opt__version="$opt_help $opt_verbose $opt_color" + local opt__yank="$opt_common $opt_lock --vers --undo --index --token" + + if [[ $cword -eq 1 ]]; then + if [[ "$cur" == -* ]]; then + COMPREPLY=( $( compgen -W "${opt___nocmd}" -- "$cur" ) ) + else + COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) + fi + elif [[ $cword -ge 2 ]]; then + case "${prev}" in + --vcs) + COMPREPLY=( $( compgen -W "$vcs" -- "$cur" ) ) + ;; + --color) + COMPREPLY=( $( compgen -W "$color" -- "$cur" ) ) + ;; + --message-format) + COMPREPLY=( $( compgen -W "$msg_format" -- "$cur" ) ) + ;; + --manifest-path) + _filedir toml + ;; + --bin) + COMPREPLY=( $( compgen -W "$(_bin_names)" -- "$cur" ) ) + ;; + --test) + COMPREPLY=( $( compgen -W "$(_test_names)" -- "$cur" ) ) + ;; + --bench) + COMPREPLY=( $( compgen -W "$(_benchmark_names)" -- "$cur" ) ) + ;; + --example) + COMPREPLY=( $( compgen -W "$(_get_examples)" -- "$cur" ) ) + ;; + --target) + COMPREPLY=( $( compgen -W "$(_get_targets)" -- "$cur" ) ) + ;; + help) + COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) + ;; + *) + local opt_var=opt__${cmd//-/_} + COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) ) + ;; + esac + fi + + # compopt does not work in bash version 3 + + return 0 +} && +complete -F _cargo cargo + +__cargo_commands=$(cargo --list 2>/dev/null | tail -n +2) + +_locate_manifest(){ + local manifest=`cargo locate-project 2>/dev/null` + # regexp-replace manifest '\{"root":"|"\}' '' + echo ${manifest:9:-2} +} + +# Extracts the values of "name" from the array given in $1 and shows them as +# command line options for completion +_get_names_from_array() +{ + local manifest=$(_locate_manifest) + if [[ 
-z $manifest ]]; then + return 0 + fi + + local last_line + local -a names + local in_block=false + local block_name=$1 + while read line + do + if [[ $last_line == "[[$block_name]]" ]]; then + in_block=true + else + if [[ $last_line =~ .*\[\[.* ]]; then + in_block=false + fi + fi + + if [[ $in_block == true ]]; then + if [[ $line =~ .*name.*\= ]]; then + line=${line##*=} + line=${line%%\"} + line=${line##*\"} + names+=($line) + fi + fi + + last_line=$line + done < $manifest + echo "${names[@]}" +} + +#Gets the bin names from the manifest file +_bin_names() +{ + _get_names_from_array "bin" +} + +#Gets the test names from the manifest file +_test_names() +{ + _get_names_from_array "test" +} + +#Gets the bench names from the manifest file +_benchmark_names() +{ + _get_names_from_array "bench" +} + +_get_examples(){ + local files=($(dirname $(_locate_manifest))/examples/*.rs) + local names=("${files[@]##*/}") + local names=("${names[@]%.*}") + # "*" means no examples found + if [[ "${names[@]}" != "*" ]]; then + echo "${names[@]}" + fi +} + +_get_targets(){ + local CURRENT_PATH + if [ `uname -o` == "Cygwin" -a -f "$PWD"/Cargo.toml ]; then + CURRENT_PATH=$PWD + else + CURRENT_PATH=$(_locate_manifest) + fi + if [[ -z "$CURRENT_PATH" ]]; then + return 1 + fi + local TARGETS=() + local FIND_PATHS=( "/" ) + local FIND_PATH LINES LINE + while [[ "$CURRENT_PATH" != "/" ]]; do + FIND_PATHS+=( "$CURRENT_PATH" ) + CURRENT_PATH=$(dirname $CURRENT_PATH) + done + for FIND_PATH in ${FIND_PATHS[@]}; do + if [[ -f "$FIND_PATH"/.cargo/config ]]; then + LINES=( `grep "$FIND_PATH"/.cargo/config -e "^\[target\."` ) + for LINE in ${LINES[@]}; do + TARGETS+=(`sed 's/^\[target\.\(.*\)\]$/\1/' <<< $LINE`) + done + fi + done + echo "${TARGETS[@]}" +} +# vim:ft=sh diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-bench.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-bench.1 new file mode 100644 index 000000000..dfb9ee4ec --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-bench.1 @@ -0,0 +1,143 @@ +.TH "CARGO\-BENCH" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-bench \- Execute benchmarks of a package +.SH SYNOPSIS +.PP +\f[I]cargo bench\f[] [OPTIONS] [\-\-] [...] +.SH DESCRIPTION +.PP +Execute all benchmarks of a local package. +.PP +All of the trailing arguments are passed to the benchmark binaries +generated for filtering benchmarks and generally providing options +configuring how they run. +.PP +If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a +package id specification which indicates which package should be built. +If it is not given, then the current package is built. +For more information on \f[I]SPEC\f[] and its format, see the "cargo +help pkgid" command. +.PP +The \f[B]\-\-jobs\f[] argument affects the building of the benchmark +executable but does not affect how many jobs are used when running the +benchmarks. +.PP +Compilation can be customized with the \[aq]bench\[aq] profile in the +manifest. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-lib +Benchmark only this package\[aq]s library. +.RS +.RE +.TP +.B \-\-bin \f[I]NAME\f[] +Benchmark only the specified binary. +.RS +.RE +.TP +.B \-\-example \f[I]NAME\f[] +Benchmark only the specified example. +.RS +.RE +.TP +.B \-\-test \f[I]NAME\f[] +Benchmark only the specified test target. +.RS +.RE +.TP +.B \-\-bench \f[I]NAME\f[] +Benchmark only the specified bench target. 
+.RS +.RE +.TP +.B \-\-no\-run +Compile, but don\[aq]t run benchmarks. +.RS +.RE +.TP +.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[] +Package to run benchmarks for. +.RS +.RE +.TP +.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[] +Number of parallel jobs, defaults to # of CPUs. +.RS +.RE +.TP +.B \-\-release +Build artifacts in release mode, with optimizations. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space\-separated list of features to also build. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not build the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-target \f[I]TRIPLE\f[] +Build for the target triple. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest to compile. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Execute all the benchmarks of the current package +.IP +.nf +\f[C] +$\ cargo\ bench +\f[] +.fi +.PP +Execute the BENCH benchmark +.IP +.nf +\f[C] +$\ cargo\ bench\ \-\-bench\ BENCH +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-build(1), cargo\-test(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-build.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-build.1 new file mode 100644 index 000000000..18c16c63d --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-build.1 @@ -0,0 +1,132 @@ +.TH "CARGO\-BUILD" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-build \- Compile the current project +.SH SYNOPSIS +.PP +\f[I]cargo build\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Compile a local package and all of its dependencies. +.PP +If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a +package id specification which indicates which package should be built. +If it is not given, then the current package is built. +For more information on \f[I]SPEC\f[] and its format, see the "cargo +help pkgid" command. +.PP +Compilation can be configured via the use of profiles which are +configured in the manifest. +The default profile for this command is \f[I]dev\f[], but passing the +\f[B]\-\-release\f[] flag will use the \f[I]release\f[] profile instead. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[] +Package to build. +.RS +.RE +.TP +.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[] +Number of parallel jobs, defaults to # of CPUs. +.RS +.RE +.TP +.B \-\-lib +Build only this package\[aq]s library. +.RS +.RE +.TP +.B \-\-bin \f[I]NAME\f[] +Build only the specified binary. +.RS +.RE +.TP +.B \-\-example \f[I]NAME\f[] +Build only the specified example. +.RS +.RE +.TP +.B \-\-test \f[I]NAME\f[] +Build only the specified test target. +.RS +.RE +.TP +.B \-\-bench \f[I]NAME\f[] +Build only the specified benchmark target. +.RS +.RE +.TP +.B \-\-release +Build artifacts in release mode, with optimizations. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space\-separated list of features to also build. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not build the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-target \f[I]TRIPLE\f[] +Build for the target triple. 
+.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest to compile. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Build a local package and all of its dependencies +.IP +.nf +\f[C] +$\ cargo\ build +\f[] +.fi +.PP +Build a package with optimizations +.IP +.nf +\f[C] +$\ cargo\ build\ \-\-release +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-check.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-check.1 new file mode 100644 index 000000000..0931bf0e9 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-check.1 @@ -0,0 +1,132 @@ +.TH "CARGO\-CHECK" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-check \- Check the current project +.SH SYNOPSIS +.PP +\f[I]cargo check\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Check a local package and all of its dependencies. +.PP +If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a +package id specification which indicates which package should be checked. +If it is not given, then the current package is checked. +For more information on \f[I]SPEC\f[] and its format, see the "cargo +help pkgid" command. +.PP +Compilation can be configured via the use of profiles which are +configured in the manifest. +The default profile for this command is \f[I]dev\f[], but passing the +\f[B]\-\-release\f[] flag will use the \f[I]release\f[] profile instead. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[] +Package to check. +.RS +.RE +.TP +.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[] +Number of parallel jobs, defaults to # of CPUs. +.RS +.RE +.TP +.B \-\-lib +Check only this package\[aq]s library. +.RS +.RE +.TP +.B \-\-bin \f[I]NAME\f[] +Check only the specified binary. +.RS +.RE +.TP +.B \-\-example \f[I]NAME\f[] +Check only the specified example. +.RS +.RE +.TP +.B \-\-test \f[I]NAME\f[] +Check only the specified test target. +.RS +.RE +.TP +.B \-\-bench \f[I]NAME\f[] +Check only the specified benchmark target. +.RS +.RE +.TP +.B \-\-release +Check artifacts in release mode. +.RS +.RE +.TP +.B \-\-all\-features +Check with all available features. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space\-separated list of features to also check. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not check the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-target \f[I]TRIPLE\f[] +Check for the target triple. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest to compile. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Check a local package and all of its dependencies +.IP +.nf +\f[C] +$\ cargo\ check +\f[] +.fi +.PP +Check a package with optimizations +.IP +.nf +\f[C] +$\ cargo\ check\ \-\-release +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. 
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-clean.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-clean.1 new file mode 100644 index 000000000..6777c984f --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-clean.1 @@ -0,0 +1,82 @@ +.TH "CARGO\-CLEAN" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-clean \- Remove generated artifacts +.SH SYNOPSIS +.PP +\f[I]cargo clean\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Remove artifacts that cargo has generated in the past. +.PP +If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a +package id specification which indicates which package should be cleaned. +If it is not given, then the current package is cleaned. +For more information on \f[I]SPEC\f[] and its format, see the "cargo +help pkgid" command. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[] +Package to clean artifacts for. +.RS +.RE +.TP +.B \-\-manifest\-path PATH +Path to the manifest of the package to clean. +.RS +.RE +.TP +.B \-\-target TRIPLE +Target triple to clean output for (default all). +.RS +.RE +.TP +.B \-\-release +Whether or not to clean release artifacts. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Remove local package generated artifacts +.IP +.nf +\f[C] +$\ cargo\ clean +\f[] +.fi +.PP +Clean release artifacts +.IP +.nf +\f[C] +$\ cargo\ clean\ \-\-release +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-build(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-doc.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-doc.1 new file mode 100644 index 000000000..f910957c6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-doc.1 @@ -0,0 +1,109 @@ +.TH "CARGO\-DOC" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-doc \- Build a package\[aq]s documentation +.SH SYNOPSIS +.PP +\f[I]cargo doc\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Build a package\[aq]s documentation. +.PP +By default the documentation for the local package and all dependencies +is built. +The output is all placed in \[aq]target/doc\[aq] in rustdoc\[aq]s usual +format. +.PP +If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a +package id specification which indicates which package should be documented. +If it is not given, then the current package is documented. +For more information on \f[I]SPEC\f[] and its format, see the "cargo +help pkgid" command. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[] +Package to document. +.RS +.RE +.TP +.B \-\-open +Open the docs in a browser after the operation. +.RS +.RE +.TP +.B \-\-no\-deps +Don\[aq]t build documentation for dependencies. +.RS +.RE +.TP +.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[] +Number of parallel jobs, defaults to # of CPUs. +.RS +.RE +.TP +.B \-\-release +Build artifacts in release mode, with optimizations. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space\-separated list of features to also build. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. 
+.RS +.RE +.TP +.B \-\-no\-default\-features +Do not build the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-target \f[I]TRIPLE\f[] +Build for the target triple. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest to compile. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Build the local package\[aq]s documentation in \[aq]target/doc\[aq] +.IP +.nf +\f[C] +$\ cargo\ doc +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-build(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-fetch.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-fetch.1 new file mode 100644 index 000000000..96c49ab88 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-fetch.1 @@ -0,0 +1,52 @@ +.TH "CARGO\-FETCH" "1" "July 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-fetch \- Fetch dependencies of a package from the network +.SH SYNOPSIS +.PP +\f[I]cargo fetch\f[] [OPTIONS] +.SH DESCRIPTION +.PP +If a lockfile is available, this command will ensure that all of the git +dependencies and/or registry dependencies are downloaded and locally +available. The network is never touched after a `cargo fetch` unless +the lockfile changes. + +If the lockfile is not available, then this is the equivalent of +`cargo generate-lockfile`. A lockfile is generated and all dependencies +are updated. +.PP +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-manifest-path \f[I]PATH\f[] +Path to the manifest to fetch dependencies for. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-update(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-generate-lockfile.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-generate-lockfile.1 new file mode 100644 index 000000000..313471a04 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-generate-lockfile.1 @@ -0,0 +1,41 @@ +.TH "CARGO\-GENERATE\-LOCKFILE" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-generate-lockfile \- Generate the lockfile for a project +.SH SYNOPSIS +.PP +\f[I]cargo generate-lockfile\f[] [OPTIONS] +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-manifest-path \f[I]PATH\f[] +Path to the manifest to generate a lockfile for. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. 
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-init.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-init.1 new file mode 100644 index 000000000..a2b392ad1 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-init.1 @@ -0,0 +1,68 @@ +.TH "CARGO\-INIT" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-init \- Create a new cargo package in the current directory +.SH SYNOPSIS +.PP +\f[I]cargo init\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Create a new cargo package in the current directory. +.PP +Use the \f[B]\-\-vcs\f[] option to control the version control system to +use. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-vcs \f[I]VCS\f[] +Initialize a new repository for the given version control system (git or +hg) or do not initialize any version control at all (none) overriding a +global configuration. +.RS +.RE +.TP +.B \-\-bin +Use a binary instead of a library template. +.RS +.RE +.TP +.B \-\-name \f[I]NAME\f[] +Set the resulting package name. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Initialize a binary cargo package in the current directory +.IP +.nf +\f[C] +$\ cargo\ init\ \-\-bin +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-new(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-install.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-install.1 new file mode 100644 index 000000000..b85e68a49 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-install.1 @@ -0,0 +1,157 @@ +.TH "CARGO\-INSTALL" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-install \- Install a Rust binary +.SH SYNOPSIS +.PP +\f[I]cargo install\f[] [OPTIONS] +.PP +\f[I]cargo install\f[] [OPTIONS] \-\-list +.SH DESCRIPTION +.PP +Install a Rust binary. +.PP +This command manages Cargo\[aq]s local set of installed binary crates. +Only packages which have [[bin]] targets can be installed, and all +binaries are installed into the installation root\[aq]s \f[I]bin\f[] +folder. +The installation root is determined, in order of precedence, by +\f[B]\-\-root\f[], \f[I]$CARGO_INSTALL_ROOT\f[], the +\f[I]install.root\f[] configuration key, and finally the home directory +(which is either \f[I]$CARGO_HOME\f[] if set or \f[I]$HOME/.cargo\f[] by +default). +.PP +There are multiple sources from which a crate can be installed. +The default location is crates.io but the \f[B]\-\-git\f[] and +\f[B]\-\-path\f[] flags can change this source. +If the source contains more than one package (such as \f[I]crates.io\f[] +or a git repository with multiple crates) the \f[B]CRATE\f[] argument is +required to indicate which crate should be installed. +.PP +Crates from crates.io can optionally specify the version they wish to +install via the \f[B]\-\-vers\f[] flag, and similarly packages from git +repositories can optionally specify the branch, tag, or revision that +should be installed. +If a crate has multiple binaries, the \f[B]\-\-bin\f[] argument can +selectively install only one of them, and if you\[aq]d rather install +examples the \f[B]\-\-example\f[] argument can be used as well.
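+.\" Illustrative example added for clarity: the repository URL, tag, and
+.\" binary name below are placeholders, not a real crate; the flags are the
+.\" ones documented under OPTIONS.
+.PP
+For example, one plausible invocation installing a single binary from a
+git repository at a specific tag might be:
+.IP
+.nf
+\f[C]
+$\ cargo\ install\ \-\-git\ https://github.com/user/repo\ \-\-tag\ v1.0.0\ \-\-bin\ mybin
+\f[]
+.fi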
+.PP +As a special convenience, omitting the specification entirely +will install the crate in the current directory. +That is, \f[I]install\f[] is equivalent to the more explicit "install +\-\-path .". +.PP +The \f[B]\-\-list\f[] option will list all installed packages (and their +versions). +.SH OPTIONS +.SS Query options +.TP +.B \-\-list +List all installed packages (and their versions). +.RS +.RE +.SS Specifying what crate to install +.TP +.B \-\-vers \f[I]VERS\f[] +Specify a version to install from crates.io. +.RS +.RE +.TP +.B \-\-git \f[I]URL\f[] +Git URL to install the specified crate from. +.RS +.RE +.TP +.B \-\-branch \f[I]BRANCH\f[] +Branch to use when installing from git. +.RS +.RE +.TP +.B \-\-tag \f[I]TAG\f[] +Tag to use when installing from git. +.RS +.RE +.TP +.B \-\-rev \f[I]SHA\f[] +Specific commit to use when installing from git. +.RS +.RE +.TP +.B \-\-path \f[I]PATH\f[] +Filesystem path to local crate to install. +.RS +.RE +.SS Build and install options +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[] +Number of parallel jobs, defaults to # of CPUs. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space\-separated list of features to activate. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. +.RS +.RE +.TP +.B \-f, \-\-force +Force overwriting existing crates or binaries. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not build the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-debug +Build in debug mode instead of release mode. +.RS +.RE +.TP +.B \-\-bin \f[I]NAME\f[] +Only install the binary NAME. +.RS +.RE +.TP +.B \-\-example \f[I]EXAMPLE\f[] +Install the example EXAMPLE instead of binaries. +.RS +.RE +.TP +.B \-\-root \f[I]DIR\f[] +Directory to install packages into. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-search(1), cargo\-publish(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-login.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-login.1 new file mode 100644 index 000000000..a82c8284b --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-login.1 @@ -0,0 +1,41 @@ +.TH "CARGO\-LOGIN" "1" "July 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-login \- Save an API token from the registry locally +.SH SYNOPSIS +.PP +\f[I]cargo login\f[] [OPTIONS] [\f[I]TOKEN\f[]] +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-host \f[I]HOST\f[] +Host to set the token for. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-publish(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. 
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-metadata.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-metadata.1 new file mode 100644 index 000000000..69d72535c --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-metadata.1 @@ -0,0 +1,71 @@ +.TH "CARGO\-METADATA" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-metadata \- Machine-readable metadata about the current project +.SH SYNOPSIS +.PP +\f[I]cargo metadata\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Output the resolved dependencies of a project, with the concrete versions +used (including overrides), in machine-readable format. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space-separated list of features. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not include the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-no\-deps +Output information only about the root package and don\[aq]t fetch +dependencies. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest. +.RS +.RE +.TP +.B \-\-format\-version \f[I]VERSION\f[] +Format version [default: 1]. Valid values: 1. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-new.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-new.1 new file mode 100644 index 000000000..7325c5b2d --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-new.1 @@ -0,0 +1,68 @@ +.TH "CARGO\-NEW" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-new \- Create a new cargo package +.SH SYNOPSIS +.PP +\f[I]cargo new\f[] [OPTIONS] \f[I]PATH\f[] +.SH DESCRIPTION +.PP +Create a new cargo package at \f[I]PATH\f[]. +.PP +Use the \f[B]\-\-vcs\f[] option to control the version control system to +use. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-vcs \f[I]VCS\f[] +Initialize a new repository for the given version control system (git or +hg) or do not initialize any version control at all (none) overriding a +global configuration. +.RS +.RE +.TP +.B \-\-bin +Use a binary instead of a library template. +.RS +.RE +.TP +.B \-\-name \f[I]NAME\f[] +Set the resulting package name. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Create a binary cargo package in the current directory +.IP +.nf +\f[C] +$\ cargo\ new\ \-\-bin\ ./ +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-init(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. 
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-owner.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-owner.1 new file mode 100644 index 000000000..c690dc048 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-owner.1 @@ -0,0 +1,88 @@ +.TH "CARGO\-OWNER" "1" "July 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-owner \- Manage the owners of a crate on the registry +.SH SYNOPSIS +.PP +\f[I]cargo owner\f[] [OPTIONS] [\f[I]CRATE\f[]] +.SH DESCRIPTION +.PP +This command will modify the owners for a package on the specified +registry (or default). Note that owners of a package can upload new +versions, and yank old versions. Explicitly named owners can also modify +the set of owners, so take caution! +.PP +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-a, \-\-add \f[I]LOGIN\f[] +Name of a user or team to add as an owner. +.RS +.RE +.TP +.B \-r, \-\-remove \f[I]LOGIN\f[] +Name of a user or team to remove as an owner. +.RS +.RE +.TP +.B \-l, \-\-list +List owners of a crate. +.RS +.RE +.TP +.B \-\-index \f[I]INDEX\f[] +Registry index to modify owners for. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Add user as an owner of the current package +.IP +.nf +\f[C] +$\ cargo\ owner\ \-\-add\ user +\f[] +.fi +.PP +Remove user as an owner of the current package +.IP +.nf +\f[C] +$\ cargo\ owner\ \-\-remove\ user +\f[] +.fi +.PP +Use a certain API token to authenticate with +.IP +.nf +\f[C] +$\ cargo\ owner\ \-\-token\ U6WHXacP3Qqwd5kze1fohr4JEOmGCuRK2 +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-publish(1), cargo\-login(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-package.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-package.1 new file mode 100644 index 000000000..fc703b2fa --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-package.1 @@ -0,0 +1,59 @@ +.TH "CARGO\-PACKAGE" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-package \- Create a distributable tarball +.SH SYNOPSIS +.PP +\f[I]cargo package\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Assemble the local package into a distributable tarball. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-l, \-\-list +Print files included in a package without making one. +.RS +.RE +.TP +.B \-\-no\-verify +Don\[aq]t verify the contents by building them. +.RS +.RE +.TP +.B \-\-no\-metadata +Ignore warnings about a lack of human\-usable metadata. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest to compile. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-build(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. 
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-pkgid.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-pkgid.1 new file mode 100644 index 000000000..d06da2dde --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-pkgid.1 @@ -0,0 +1,75 @@ +.TH "CARGO\-PKGID" "1" "July 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-pkgid \- Print a fully qualified package specification +.SH SYNOPSIS +.PP +\f[I]cargo pkgid\f[] [OPTIONS] [\f[I]SPEC\f[]] +.SH DESCRIPTION +.PP +Given a \f[I]SPEC\f[] argument, print out the fully qualified package id +specifier. This command will generate an error if \f[I]SPEC\f[] is ambiguous as +to which package it refers to in the dependency graph. If no \f[I]SPEC\f[] is +given, then the pkgid for the local package is printed. +.PP +This command requires that a lockfile is available and dependencies have +been fetched. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest of the package. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Retrieve package specification for foo package +.IP +.nf +\f[C] +$\ cargo\ pkgid\ foo +\f[] +.fi +.PP +Retrieve package specification for version 1.0.0 of foo +.IP +.nf +\f[C] +$\ cargo\ pkgid\ foo:1.0.0 +\f[] +.fi +.PP +Retrieve package specification for foo from crates.io +.IP +.nf +\f[C] +$\ cargo\ pkgid\ crates.io/foo +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-generate\-lockfile(1), cargo-search(1), cargo-metadata(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-publish.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-publish.1 new file mode 100644 index 000000000..2f5063139 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-publish.1 @@ -0,0 +1,59 @@ +.TH "CARGO\-PUBLISH" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-publish \- Upload a package to the registry +.SH SYNOPSIS +.PP +\f[I]cargo publish\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Upload a package to the registry. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-host \f[I]HOST\f[] +Host to upload the package to. +.RS +.RE +.TP +.B \-\-token \f[I]TOKEN\f[] +Token to use when uploading. +.RS +.RE +.TP +.B \-\-no\-verify +Don\[aq]t verify package tarball before publish. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest of the package to publish. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-install(1), cargo\-search(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. 
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-run.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-run.1 new file mode 100644 index 000000000..80473d2be --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-run.1 @@ -0,0 +1,103 @@ +.TH "CARGO\-RUN" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-run \- Run the current project +.SH SYNOPSIS +.PP +\f[I]cargo run\f[] [OPTIONS] [\-\-] [...] +.SH DESCRIPTION +.PP +Run the main binary of the local package (src/main.rs). +.PP +If neither \f[B]\-\-bin\f[] nor \f[B]\-\-example\f[] is given, then if +the project only has one bin target it will be run. +Otherwise \f[B]\-\-bin\f[] specifies the bin target to run, and +\f[B]\-\-example\f[] specifies the example target to run. +At most one of \f[B]\-\-bin\f[] or \f[B]\-\-example\f[] can be provided. +.PP +All of the trailing arguments are passed to the binary to run. +If you\[aq]re passing arguments to both Cargo and the binary, the ones +after \f[B]\-\-\f[] go to the binary, the ones before go to Cargo. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-bin \f[I]NAME\f[] +Name of the bin target to run. +.RS +.RE +.TP +.B \-\-example \f[I]NAME\f[] +Name of the example target to run. +.RS +.RE +.TP +.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[] +Number of parallel jobs, defaults to # of CPUs. +.RS +.RE +.TP +.B \-\-release +Build artifacts in release mode, with optimizations. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space\-separated list of features to also build. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not build the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-target \f[I]TRIPLE\f[] +Build for the target triple. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest to compile. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Run the main binary of the current package +.IP +.nf +\f[C] +$\ cargo\ run +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-new(1), cargo\-init(1), cargo\-build(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-rustc.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-rustc.1 new file mode 100644 index 000000000..f5d9a3521 --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-rustc.1 @@ -0,0 +1,126 @@ +.TH "CARGO\-RUSTC" "1" "July 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-rustc \- Compile a package and all of its dependencies +.SH SYNOPSIS +.PP +\f[I]cargo rustc\f[] [OPTIONS] [\-\-] [...] +.SH DESCRIPTION +.PP +The specified target for the current package (or package specified by +SPEC if provided) will be compiled along with all of its dependencies. +The specified \f[I]ARGS\f[] will all be passed to the final compiler +invocation, not any of the dependencies. +Note that the compiler will still unconditionally receive arguments such +as \-L, \-\-extern, and \-\-crate\-type, and the specified \f[I]ARGS\f[] +will simply be added to the compiler invocation. +.PP +This command requires that only one target is being compiled.
+If more than one target is available for the current package, the filters +of \-\-lib, \-\-bin, etc., must be used to select which target is +compiled. +To pass flags to all compiler processes spawned by Cargo, use the +$RUSTFLAGS environment variable or the \f[C]build.rustflags\f[] +configuration option. +.PP +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC\f[] +Package to compile for. +.RS +.RE +.TP +.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[] +Number of parallel jobs, defaults to # of CPUs. +.RS +.RE +.TP +.B \-\-lib +Build only this package\[aq]s library. +.RS +.RE +.TP +.B \-\-bin \f[I]NAME\f[] +Build only the specified binary. +.RS +.RE +.TP +.B \-\-example \f[I]NAME\f[] +Build only the specified example. +.RS +.RE +.TP +.B \-\-test \f[I]NAME\f[] +Build only the specified test target. +.RS +.RE +.TP +.B \-\-bench \f[I]NAME\f[] +Build only the specified benchmark target. +.RS +.RE +.TP +.B \-\-release +Build artifacts in release mode, with optimizations. +.RS +.RE +.TP +.B \-\-profile \f[I]PROFILE\f[] +Profile to build the selected target for. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space\-separated list of features to also build. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not compile default features for the package. +.RS +.RE +.TP +.B \-\-target \f[I]TRIPLE\f[] +Target triple which compiles will be for. +.RS +.RE +.TP +.B \-\-manifest-path \f[I]PATH\f[] +Path to the manifest to fetch dependencies for. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-run(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-rustdoc.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-rustdoc.1 new file mode 100644 index 000000000..3a898a31a --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-rustdoc.1 @@ -0,0 +1,124 @@ +.TH "CARGO\-RUSTDOC" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-rustdoc \- Build a package\[aq]s documentation, using specified +custom flags. + +.SH SYNOPSIS +.PP +\f[I]cargo rustdoc\f[] [OPTIONS] [\-\-] [...] +.SH DESCRIPTION +.PP +The specified target for the current package (or package specified by +SPEC if provided) will be documented with the specified \f[I]ARGS\f[] +being passed to the final rustdoc invocation. +Dependencies will not be documented as part of this command. +Note that rustdoc will still unconditionally receive arguments such as +\-L, \-\-extern, and \-\-crate\-type, and the specified \f[I]ARGS\f[] +will simply be added to the rustdoc invocation. +.PP +If the \-\-package argument is given, then SPEC is a package id +specification which indicates which package should be documented. +If it is not given, then the current package is documented. +For more information on SPEC and its format, see the +\f[C]cargo\ help\ pkgid\f[] command. + +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-open +Open the docs in a browser after the operation. +.RS +.RE +.TP +.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC\f[] +Package to document. 
+.RS +.RE +.TP +.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[] +Number of parallel jobs, defaults to # of CPUs. +.RS +.RE +.TP +.B \-\-lib +Build only this package\[aq]s library. +.RS +.RE +.TP +.B \-\-bin \f[I]NAME\f[] +Build only the specified binary. +.RS +.RE +.TP +.B \-\-example \f[I]NAME\f[] +Build only the specified example. +.RS +.RE +.TP +.B \-\-test \f[I]NAME\f[] +Build only the specified test target. +.RS +.RE +.TP +.B \-\-bench \f[I]NAME\f[] +Build only the specified benchmark target. +.RS +.RE +.TP +.B \-\-release +Build artifacts in release mode, with optimizations. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space-separated list of features to also build. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not build the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-target \f[I]TRIPLE\f[] +Build for the target triple. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest to document. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo-doc(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-search.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-search.1 new file mode 100644 index 000000000..e8b1da3ca --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-search.1 @@ -0,0 +1,49 @@ +.TH "CARGO\-SEARCH" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-search \- Search packages in crates.io +.SH SYNOPSIS +.PP +\f[I]cargo search\f[] [OPTIONS] ... +.SH DESCRIPTION +.PP +Search packages in \f[I]crates.io\f[]. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-host \f[I]HOST\f[] +Host of a registry to search in. +.RS +.RE +.TP +.B \-\-limit \f[I]LIMIT\f[] +Limit the number of results (default: 10, max: 100). +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-install(1), cargo\-publish(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-test.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-test.1 new file mode 100644 index 000000000..2d9907f0b --- /dev/null +++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-test.1 @@ -0,0 +1,172 @@ +.TH "CARGO\-TEST" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-test \- Execute unit and integration tests of a package +.SH SYNOPSIS +.PP +\f[I]cargo test\f[] [OPTIONS] [\-\-] [...] +.SH DESCRIPTION +.PP +Execute all unit and integration tests of a local package. +.PP +All of the trailing arguments are passed to the test binaries generated +for filtering tests and generally providing options configuring how they +run. 
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-doc(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-search.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-search.1
new file mode 100644
index 000000000..e8b1da3ca
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-search.1
@@ -0,0 +1,49 @@
+.TH "CARGO\-SEARCH" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-search \- Search packages in crates.io
+.SH SYNOPSIS
+.PP
+\f[I]cargo search\f[] [OPTIONS] <query>...
+.SH DESCRIPTION
+.PP
+Search packages in \f[I]crates.io\f[].
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-host \f[I]HOST\f[]
+Host of a registry to search in.
+.RS
+.RE
+.TP
+.B \-\-limit \f[I]LIMIT\f[]
+Limit the number of results (default: 10, max: 100).
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1), cargo\-publish(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-test.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-test.1
new file mode 100644
index 000000000..2d9907f0b
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-test.1
@@ -0,0 +1,172 @@
+.TH "CARGO\-TEST" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-test \- Execute unit and integration tests of a package
+.SH SYNOPSIS
+.PP
+\f[I]cargo test\f[] [OPTIONS] [\-\-] [<args>...]
+.SH DESCRIPTION
+.PP
+Execute all unit and integration tests of a local package.
+.PP
+All of the trailing arguments are passed to the test binaries generated
+for filtering tests and generally providing options configuring how they
+run.
+For example, this will run all tests with \[aq]foo\[aq] in their
+name:
+.IP
+.nf
+\f[C]
+cargo\ test\ foo
+\f[]
+.fi
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \[aq]SPEC\[aq] is a
+package id specification which indicates which package should be tested.
+If it is not given, then the current package is tested.
+For more information on \[aq]SPEC\[aq] and its format, see the "cargo
+help pkgid" command.
+.PP
+The \f[B]\-\-jobs\f[] argument affects the building of the test
+executable but does not affect how many jobs are used when running the
+tests.
+.PP
+Compilation can be configured via the \[aq]test\[aq] profile in the
+manifest.
+.PP
+By default the Rust test harness hides output from test execution to
+keep results readable.
+Test output can be recovered (e.g.
+for debugging) by passing \f[B]\-\-nocapture\f[] to the test binaries:
+.IP
+.nf
+\f[C]
+cargo\ test\ \-\-\ \-\-nocapture
+\f[]
+.fi
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-lib
+Test only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-doc
+Test only this library\[aq]s documentation.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Test only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Test only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Test only the specified integration test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Test only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-no\-run
+Compile, but don\[aq]t run tests.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to run tests for.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-\-no\-fail\-fast
+Run all tests regardless of failure.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Execute all the unit and integration tests of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ test
+\f[]
+.fi
+.PP
+Execute the BENCH benchmark
+.IP
+.nf
+\f[C]
+$\ cargo\ test\ \-\-bench\ BENCH
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-uninstall.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-uninstall.1
new file mode 100644
index 000000000..64e9aa7f0
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-uninstall.1
@@ -0,0 +1,56 @@
+.TH "CARGO\-UNINSTALL" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-uninstall \- Remove a Rust binary
+.SH SYNOPSIS
+.PP
+\f[I]cargo uninstall\f[] [OPTIONS] <SPEC>
+.PP
+\f[I]cargo uninstall\f[] (\-h | \-\-help)
+.SH DESCRIPTION
+.PP
+The argument SPEC is a package id specification (see
+\f[C]cargo\ help\ pkgid\f[]) to specify which crate should be
+uninstalled.
+By default all binaries are uninstalled for a crate, but the
+\f[C]\-\-bin\f[] and \f[C]\-\-example\f[] flags can be used to only
+uninstall particular binaries.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-root \f[I]DIR\f[]
+Directory to uninstall packages from.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Only uninstall the binary NAME.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-update.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-update.1
new file mode 100644
index 000000000..14b64374d
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-update.1
@@ -0,0 +1,90 @@
+.TH "CARGO\-UPDATE" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-update \- Update the package dependencies
+.SH SYNOPSIS
+.PP
+\f[I]cargo update\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Update dependencies as recorded in the local lock file.
+.PP
+This command requires that a \f[I]Cargo.lock\f[] already exists as
+generated by \f[I]cargo build\f[] or related commands.
+.PP
+If \f[I]SPEC\f[] is given, then a conservative update of the
+\f[I]lockfile\f[] will be performed.
+This means that only the dependency specified by \f[I]SPEC\f[] will be
+updated.
+Its transitive dependencies will be updated only if \f[I]SPEC\f[] cannot
+be updated without updating dependencies.
+All other dependencies will remain locked at their currently recorded
+versions.
+.PP
+If \f[I]PRECISE\f[] is specified, then \f[B]\-\-aggressive\f[] must not
+also be specified.
+The argument \f[I]PRECISE\f[] is a string representing a precise
+revision that the package being updated should be updated to.
+For example, if the package comes from a git repository, then
+\f[I]PRECISE\f[] would be the exact revision that the repository should
+be updated to.
+.PP
+If \f[I]SPEC\f[] is not given, then all dependencies will be
+re\-resolved and updated.
+.PP
+For more information about package id specifications, see "cargo help
+pkgid".
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to update.
+.RS
+.RE
+.TP
+.B \-\-aggressive
+Force updating all dependencies of \f[I]SPEC\f[] as well.
+.RS
+.RE
+.TP
+.B \-\-precise \f[I]PRECISE\f[]
+Update a single dependency to exactly \f[I]PRECISE\f[].
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
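+.SH EXAMPLES
+.PP
+Update a single dependency to an exact version; the package name and
+version shown here are only illustrative:
+.IP
+.nf
+\f[C]
+$\ cargo\ update\ \-p\ serde\ \-\-precise\ 1.0.0
+\f[]
+.fi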
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-version.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-version.1
new file mode 100644
index 000000000..c78344d3f
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-version.1
@@ -0,0 +1,31 @@
+.TH "CARGO\-VERSION" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-version \- Show version information
+.SH SYNOPSIS
+.PP
+\f[I]cargo version\f[] [OPTIONS]
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo-yank.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo-yank.1
new file mode 100644
index 000000000..f54b2bd6b
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo-yank.1
@@ -0,0 +1,79 @@
+.TH "CARGO\-YANK" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-yank \- Remove a pushed crate from the index
+.SH SYNOPSIS
+.PP
+\f[I]cargo yank\f[] [OPTIONS] [<crate>]
+.SH DESCRIPTION
+.PP
+The yank command removes a previously pushed crate\[aq]s version from
+the server\[aq]s index.
+This command does not delete any data, and the crate will still be
+available for download via the registry\[aq]s download link.
+.PP
+Note that existing crates locked to a yanked version will still be able
+to download the yanked version to use it.
+Cargo will, however, not allow any new crates to be locked to any yanked
+version.
+.PP
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-vers \f[I]VERSION\f[]
+The version to yank or un\-yank.
+.RS
+.RE
+.TP
+.B \-\-undo
+Undo a yank, putting a version back into the index.
+.RS
+.RE
+.TP
+.B \-\-index \f[I]INDEX\f[]
+Registry index to yank from.
+.RS
+.RE
+.TP
+.B \-\-token \f[I]TOKEN\f[]
+API token to use when authenticating.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
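+.SH EXAMPLES
+.PP
+Yank a previously published version of the current crate, then undo
+the yank; the version number here is only illustrative:
+.IP
+.nf
+\f[C]
+$\ cargo\ yank\ \-\-vers\ 0.1.1
+$\ cargo\ yank\ \-\-vers\ 0.1.1\ \-\-undo
+\f[]
+.fi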
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-owner(1), cargo\-version(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/collector/compile-benchmarks/cargo/src/etc/man/cargo.1 b/collector/compile-benchmarks/cargo/src/etc/man/cargo.1
new file mode 100644
index 000000000..8baedcec9
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/src/etc/man/cargo.1
@@ -0,0 +1,206 @@
+.TH "CARGO" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo \- The Rust package manager
+.SH SYNOPSIS
+.PP
+\f[I]cargo\f[] <COMMAND> [<ARGS>...]
+.SH DESCRIPTION
+.PP
+This program is a package manager for the Rust language, available at
+<http://rust\-lang.org>.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Display a help message.
+.RS
+.RE
+.TP
+.B \-V, \-\-version
+Print version information and exit.
+.RS
+.RE
+.TP
+.B \-\-list
+List all available cargo commands.
+.RS
+.RE
+.TP
+.B \-\-explain \f[I]CODE\f[]
+Run \f[C]rustc\ \-\-explain\ CODE\f[].
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Configure coloring of output.
+.RS
+.RE
+.SH COMMANDS
+.PP
+To get extended information about commands, run \f[I]cargo help
+<command>\f[] or \f[I]man cargo\-<command>\f[].
+.TP
+.B cargo\-build(1)
+Compile the current project.
+.RS
+.RE
+.TP
+.B cargo\-clean(1)
+Remove the target directory with build output.
+.RS
+.RE
+.TP
+.B cargo\-doc(1)
+Build this project\[aq]s and its dependencies\[aq] documentation.
+.RS
+.RE
+.TP
+.B cargo\-init(1)
+Create a new cargo project in the current directory.
+.RS
+.RE
+.TP
+.B cargo\-install(1)
+Install a Rust binary.
+.RS
+.RE
+.TP
+.B cargo\-new(1)
+Create a new cargo project.
+.RS
+.RE
+.TP
+.B cargo\-run(1)
+Build and execute src/main.rs.
+.RS
+.RE
+.TP
+.B cargo\-test(1)
+Run the tests for the package.
+.RS
+.RE
+.TP
+.B cargo\-bench(1)
+Run the benchmarks for the package.
+.RS
+.RE
+.TP
+.B cargo\-update(1)
+Update dependencies in Cargo.lock.
+.RS
+.RE
+.TP
+.B cargo\-rustc(1)
+Compile the current project, and optionally pass additional rustc parameters.
+.RS
+.RE
+.TP
+.B cargo\-package(1)
+Generate a source tarball for the current package.
+.RS
+.RE
+.TP
+.B cargo\-publish(1)
+Package and upload this project to the registry.
+.RS
+.RE
+.TP
+.B cargo\-owner(1)
+Manage the owners of a crate on the registry.
+.RS
+.RE
+.TP
+.B cargo\-uninstall(1)
+Remove a Rust binary.
+.RS
+.RE
+.TP
+.B cargo\-search(1)
+Search registry for crates.
+.RS
+.RE
+.TP
+.B cargo\-help(1)
+Display help for a cargo command.
+.RS
+.RE
+.TP
+.B cargo\-version(1)
+Print cargo\[aq]s version and exit.
+.RS
+.RE
+.SH FILES
+.TP
+.B ~/.cargo
+Directory in which Cargo stores repository data.
+Cargo can be instructed to use a \f[I]\&.cargo\f[] subdirectory in a
+different location by setting the \f[B]CARGO_HOME\f[] environment
+variable.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Build a local package and all of its dependencies
+.IP
+.nf
+\f[C]
+$\ cargo\ build
+\f[]
+.fi
+.PP
+Build a package with optimizations
+.IP
+.nf
+\f[C]
+$\ cargo\ build\ \-\-release
+\f[]
+.fi
+.PP
+Run tests for a cross\-compiled target
+.IP
+.nf
+\f[C]
+$\ cargo\ test\ \-\-target\ i686\-unknown\-linux\-gnu
+\f[]
+.fi
+.PP
+Create a new project that builds an executable
+.IP
+.nf
+\f[C]
+$\ cargo\ new\ \-\-bin\ foobar
+\f[]
+.fi
+.PP
+Create a project in the current directory
+.IP
+.nf
+\f[C]
+$\ mkdir\ foo\ &&\ cd\ foo
+$\ cargo\ init\ .
+\f[]
+.fi
+.PP
+Learn about a command\[aq]s options and usage
+.IP
+.nf
+\f[C]
+$\ cargo\ help\ clean
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+rustc(1), rustdoc(1)
+.SH BUGS
+.PP
+See <https://github.com/rust\-lang/cargo/issues> for issues.
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/collector/compile-benchmarks/cargo/tests/bad-config.rs b/collector/compile-benchmarks/cargo/tests/bad-config.rs new file mode 100644 index 000000000..831f5ced4 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/bad-config.rs @@ -0,0 +1,1097 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::{project, execs}; +use cargotest::support::registry::Package; +use hamcrest::assert_that; + +#[test] +fn bad1() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [target] + nonexistent-target = "foo" + "#) + .build(); + assert_that(p.cargo("build").arg("-v") + .arg("--target=nonexistent-target"), + execs().with_status(101).with_stderr("\ +[ERROR] expected table for configuration key `target.nonexistent-target`, \ +but found string in [..]config +")); +} + +#[test] +fn bad2() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [http] + proxy = 3.0 + "#) + .build(); + assert_that(p.cargo("publish").arg("-v"), + execs().with_status(101).with_stderr("\ +[ERROR] Couldn't load Cargo configuration + +Caused by: + failed to load TOML configuration from `[..]config` + +Caused by: + failed to parse key `http` + +Caused by: + failed to parse key `proxy` + +Caused by: + found TOML configuration value of unknown type `float` +")); +} + +#[test] +fn bad3() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [http] + proxy = true + "#) + .build(); + Package::new("foo", "1.0.0").publish(); + + assert_that(p.cargo("publish").arg("-v"), + execs().with_status(101).with_stderr("\ +error: failed to update registry [..] + +Caused by: + invalid configuration for key `http.proxy` +expected a string, but found a boolean for `http.proxy` in [..]config +")); +} + +#[test] +fn bad4() { + let p = project("foo") + .file(".cargo/config", r#" + [cargo-new] + name = false + "#) + .build(); + assert_that(p.cargo("new").arg("-v").arg("foo"), + execs().with_status(101).with_stderr("\ +[ERROR] Failed to create project `foo` at `[..]` + +Caused by: + invalid configuration for key `cargo-new.name` +expected a string, but found a boolean for `cargo-new.name` in [..]config +")); +} + +#[test] +fn bad5() { + let p = project("foo") + .file(".cargo/config", r#" + foo = "" + "#) + .file("foo/.cargo/config", r#" + foo = 2 + "#) + .build(); + assert_that(p.cargo("new") + .arg("-v").arg("foo").cwd(&p.root().join("foo")), + execs().with_status(101).with_stderr("\ +[ERROR] Failed to create project `foo` at `[..]` + +Caused by: + Couldn't load Cargo configuration + +Caused by: + failed to merge configuration at `[..]` + +Caused by: + failed to merge key `foo` between files: + file 1: [..]foo[..]foo[..]config + file 2: [..]foo[..]config + +Caused by: + expected integer, but found string +")); +} + +#[test] +fn bad_cargo_config_jobs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [build] + jobs = -1 + "#) + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr("\ +[ERROR] build.jobs must be positive, but found -1 in [..] 
+")); +} + +#[test] +fn default_cargo_config_jobs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [build] + jobs = 1 + "#) + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn good_cargo_config_jobs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [build] + jobs = 4 + "#) + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn invalid_global_config() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file(".cargo/config", "4") + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr("\ +[ERROR] Couldn't load Cargo configuration + +Caused by: + could not parse TOML configuration in `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected an equals, found eof at line 1 +")); +} + +#[test] +fn bad_cargo_lock() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("Cargo.lock", "[[package]]\nfoo = 92") + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse lock file at: [..]Cargo.lock + +Caused by: + missing field `name` for key `package` +")); +} + +#[test] +fn duplicate_packages_in_cargo_lock() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("Cargo.lock", r#" + [[package]] + name = "bar" + version = "0.0.1" + dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "foo" + version = "0.1.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + + [[package]] + name = "foo" + version = "0.1.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + "#) + .build(); + + assert_that(p.cargo("build").arg("--verbose"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse lock file at: [..] + +Caused by: + package `foo` is specified twice in the lockfile +")); +} + +#[test] +fn bad_source_in_cargo_lock() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("Cargo.lock", r#" + [[package]] + name = "bar" + version = "0.0.1" + dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "foo" + version = "0.1.0" + source = "You shall not parse" + "#) + .build(); + + assert_that(p.cargo("build").arg("--verbose"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse lock file at: [..] 
+ +Caused by: + invalid source `You shall not parse` for key `package.source` +")); +} + +#[test] +fn bad_dependency_in_lockfile() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("Cargo.lock", r#" + [[package]] + name = "foo" + version = "0.0.1" + dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + "#) + .build(); + + assert_that(p.cargo("build").arg("--verbose"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse lock file at: [..] + +Caused by: + package `bar 0.1.0 ([..])` is specified as a dependency, but is missing from the package list +")); + +} + +#[test] +fn bad_git_dependency() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + foo = { git = "file:.." } + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr("\ +[UPDATING] git repository `file:///` +[ERROR] failed to load source for a dependency on `foo` + +Caused by: + Unable to update file:/// + +Caused by: + failed to clone into: [..] + +Caused by: + [[..]] 'file:///' is not a valid local file URI +")); +} + +#[test] +fn bad_crate_type() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + crate-type = ["bad_type", "rlib"] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr_contains("\ +error: failed to run `rustc` to learn about target-specific information +")); +} + +#[test] +fn malformed_override() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [target.x86_64-apple-darwin.freetype] + native = { + foo: "bar" + } + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected a table key, found a newline at line 8 +")); +} + +#[test] +fn duplicate_binary_names() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[bin]] + name = "e" + path = "a.rs" + + [[bin]] + name = "e" + path = "b.rs" + "#) + .file("a.rs", r#"fn main() -> () {}"#) + .file("b.rs", r#"fn main() -> () {}"#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate binary name e, but all binary targets must have a unique name +")); +} + +#[test] +fn duplicate_example_names() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[example]] + name = "ex" + path = "examples/ex.rs" + + [[example]] + name = "ex" + path = "examples/ex2.rs" + "#) + .file("examples/ex.rs", r#"fn main () -> () {}"#) + .file("examples/ex2.rs", r#"fn main () -> () {}"#) + .build(); + + assert_that(p.cargo("build").arg("--example").arg("ex"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate example name ex, but all example targets must have a unique name +")); +} + +#[test] +fn duplicate_bench_names() { + let p = project("foo") + .file("Cargo.toml", r#" + 
[package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[bench]] + name = "ex" + path = "benches/ex.rs" + + [[bench]] + name = "ex" + path = "benches/ex2.rs" + "#) + .file("benches/ex.rs", r#"fn main () {}"#) + .file("benches/ex2.rs", r#"fn main () {}"#) + .build(); + + assert_that(p.cargo("bench"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate bench name ex, but all bench targets must have a unique name +")); +} + +#[test] +fn duplicate_deps() { + let p = project("foo") + .file("shim-bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("shim-bar/src/lib.rs", r#" + pub fn a() {} + "#) + .file("linux-bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("linux-bar/src/lib.rs", r#" + pub fn a() {} + "#) + .file("Cargo.toml", r#" + [package] + name = "qqq" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "shim-bar" } + + [target.x86_64-unknown-linux-gnu.dependencies] + bar = { path = "linux-bar" } + "#) + .file("src/main.rs", r#"fn main () {}"#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dependency 'bar' has different source paths depending on the build target. Each dependency must \ +have a single canonical source path irrespective of build target. +")); +} + +#[test] +fn duplicate_deps_diff_sources() { + let p = project("foo") + .file("shim-bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("shim-bar/src/lib.rs", r#" + pub fn a() {} + "#) + .file("linux-bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("linux-bar/src/lib.rs", r#" + pub fn a() {} + "#) + .file("Cargo.toml", r#" + [package] + name = "qqq" + version = "0.0.1" + authors = [] + + [target.i686-unknown-linux-gnu.dependencies] + bar = { path = "shim-bar" } + + [target.x86_64-unknown-linux-gnu.dependencies] + bar = { path = "linux-bar" } + "#) + .file("src/main.rs", r#"fn main () {}"#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dependency 'bar' has different source paths depending on the build target. Each dependency must \ +have a single canonical source path irrespective of build target. +")); +} + +#[test] +fn unused_keys() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [target.foo] + bar = "3" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +warning: unused manifest key: target.foo.bar +[COMPILING] foo v0.1.0 (file:///[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + bulid = "foo" + "#) + .file("src/lib.rs", r#" + pub fn foo() {} + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +warning: unused manifest key: project.bulid +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + build = "foo" + "#) + .file("src/lib.rs", r#" + pub fn foo() {} + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +warning: unused manifest key: lib.build +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + + +#[test] +fn empty_dependencies() { + let p = project("empty_deps") + .file("Cargo.toml", r#" + [package] + name = "empty_deps" + version = "0.0.0" + authors = [] + + [dependencies] + foo = {} + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.0.1").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr_contains("\ +warning: dependency (foo) specified without providing a local path, Git repository, or version \ +to use. This will be considered an error in future versions +")); +} + +#[test] +fn invalid_toml_historically_allowed_is_warned() { + let p = project("empty_deps") + .file("Cargo.toml", r#" + [package] + name = "empty_deps" + version = "0.0.0" + authors = [] + "#) + .file(".cargo/config", r#" + [foo] bar = 2 + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +warning: TOML file found which contains invalid syntax and will soon not parse +at `[..]config`. + +The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is +invalid), but this file has a table header which does not have a newline after +it. A newline needs to be added and this warning will soon become a hard error +in the future. +[COMPILING] empty_deps v0.0.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn ambiguous_git_reference() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1" + branch = "master" + tag = "some-tag" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_stderr_contains("\ +[WARNING] dependency (bar) specification is ambiguous. \ +Only one of `branch`, `tag` or `rev` is allowed. \ +This will be considered an error in future versions +")); +} + +#[test] +fn bad_source_config1() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.foo] + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: no source URL specified for `source.foo`, need [..] +")); +} + +#[test] +fn bad_source_config2() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry https://[..] 
+ +Caused by: + could not find a configured source with the name `bar` \ + when attempting to lookup `crates-io` (configuration in [..]) +")); +} + +#[test] +fn bad_source_config3() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'crates-io' + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry https://[..] + +Caused by: + detected a cycle of `replace-with` sources, [..] +")); +} + +#[test] +fn bad_source_config4() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'http://example.com' + replace-with = 'crates-io' + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry https://[..] + +Caused by: + detected a cycle of `replace-with` sources, the source `crates-io` is \ + eventually replaced with itself (configuration in [..]) +")); +} + +#[test] +fn bad_source_config5() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'not a url' + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: configuration key `source.bar.registry` specified an invalid URL (in [..]) + +Caused by: + invalid url `not a url`: [..] +")); +} + +#[test] +fn both_git_and_path_specified() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1" + path = "bar" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(foo.cargo("build").arg("-v"), + execs().with_stderr_contains("\ +[WARNING] dependency (bar) specification is ambiguous. \ +Only one of `git` or `path` is allowed. \ +This will be considered an error in future versions +")); +} + +#[test] +fn bad_source_config6() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = ['not', 'a', 'string'] + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: expected a string, but found a array for `source.crates-io.replace-with` in [..] +")); +} + +#[test] +fn ignored_git_revision() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + path = "bar" + branch = "spam" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(foo.cargo("build").arg("-v"), + execs().with_stderr_contains("\ +[WARNING] key `branch` is ignored for dependency (bar). 
\ +This will be considered an error in future versions")); +} + +#[test] +fn bad_source_config7() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.foo] + registry = 'http://example.com' + local-registry = 'file:///another/file' + "#) + .build(); + + Package::new("bar", "0.1.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: more than one source URL specified for `source.foo` +")); +} + +#[test] +fn bad_dependency() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = 3 + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: integer `3`, expected a version string like [..] +")); +} + +#[test] +fn bad_debuginfo() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [profile.dev] + debug = 'a' + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: string \"a\", expected a boolean or an integer for [..] +")); +} + +#[test] +fn bad_opt_level() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 3 + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: integer `3`, expected a boolean or a string for key [..] 
+")); +} diff --git a/collector/compile-benchmarks/cargo/tests/bad-manifest-path.rs b/collector/compile-benchmarks/cargo/tests/bad-manifest-path.rs new file mode 100644 index 000000000..2dc0d5b26 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/bad-manifest-path.rs @@ -0,0 +1,375 @@ +extern crate hamcrest; +extern crate cargotest; + +use cargotest::support::{project, execs, main_file, basic_bin_manifest}; +use hamcrest::{assert_that}; + +fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo(command) + .arg("--manifest-path").arg(manifest_path_argument) + .cwd(p.root().parent().unwrap()), + execs().with_status(101) + .with_stderr("[ERROR] the manifest-path must be a path \ + to a Cargo.toml file")); +} + + +fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { + let p = project("foo").build(); + let expected_path = manifest_path_argument + .split('/').collect::>().join("[..]"); + + assert_that(p.cargo(command) + .arg("--manifest-path").arg(manifest_path_argument) + .cwd(p.root().parent().unwrap()), + execs().with_status(101) + .with_stderr( + format!("[ERROR] manifest path `{}` does not exist", + expected_path) + )); +} + +#[test] +fn bench_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("bench", "foo"); +} + +#[test] +fn bench_dir_plus_file() { + assert_not_a_cargo_toml("bench", "foo/bar"); +} + +#[test] +fn bench_dir_plus_path() { + assert_not_a_cargo_toml("bench", "foo/bar/baz"); +} + +#[test] +fn bench_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn build_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("build", "foo"); +} + +#[test] +fn build_dir_plus_file() { + assert_not_a_cargo_toml("bench", "foo/bar"); +} + +#[test] +fn build_dir_plus_path() { + assert_not_a_cargo_toml("bench", "foo/bar/baz"); +} + +#[test] +fn build_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("build", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn clean_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("clean", "foo"); +} + +#[test] +fn clean_dir_plus_file() { + assert_not_a_cargo_toml("clean", "foo/bar"); +} + +#[test] +fn clean_dir_plus_path() { + assert_not_a_cargo_toml("clean", "foo/bar/baz"); +} + +#[test] +fn clean_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn doc_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("doc", "foo"); +} + +#[test] +fn doc_dir_plus_file() { + assert_not_a_cargo_toml("doc", "foo/bar"); +} + +#[test] +fn doc_dir_plus_path() { + assert_not_a_cargo_toml("doc", "foo/bar/baz"); +} + +#[test] +fn doc_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn fetch_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("fetch", "foo"); +} + +#[test] +fn fetch_dir_plus_file() { + assert_not_a_cargo_toml("fetch", "foo/bar"); +} + +#[test] +fn fetch_dir_plus_path() { + assert_not_a_cargo_toml("fetch", "foo/bar/baz"); +} + +#[test] +fn fetch_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn generate_lockfile_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("generate-lockfile", "foo"); +} + +#[test] +fn generate_lockfile_dir_plus_file() { + 
assert_not_a_cargo_toml("generate-lockfile", "foo/bar"); +} + +#[test] +fn generate_lockfile_dir_plus_path() { + assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz"); +} + +#[test] +fn generate_lockfile_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn package_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("package", "foo"); +} + +#[test] +fn package_dir_plus_file() { + assert_not_a_cargo_toml("package", "foo/bar"); +} + +#[test] +fn package_dir_plus_path() { + assert_not_a_cargo_toml("package", "foo/bar/baz"); +} + +#[test] +fn package_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn pkgid_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("pkgid", "foo"); +} + +#[test] +fn pkgid_dir_plus_file() { + assert_not_a_cargo_toml("pkgid", "foo/bar"); +} + +#[test] +fn pkgid_dir_plus_path() { + assert_not_a_cargo_toml("pkgid", "foo/bar/baz"); +} + +#[test] +fn pkgid_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn publish_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("publish", "foo"); +} + +#[test] +fn publish_dir_plus_file() { + assert_not_a_cargo_toml("publish", "foo/bar"); +} + +#[test] +fn publish_dir_plus_path() { + assert_not_a_cargo_toml("publish", "foo/bar/baz"); +} + +#[test] +fn publish_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn read_manifest_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("read-manifest", "foo"); +} + +#[test] +fn read_manifest_dir_plus_file() { + assert_not_a_cargo_toml("read-manifest", "foo/bar"); +} + +#[test] +fn read_manifest_dir_plus_path() { + assert_not_a_cargo_toml("read-manifest", "foo/bar/baz"); +} + +#[test] +fn read_manifest_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn run_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("run", "foo"); +} + +#[test] +fn run_dir_plus_file() { + assert_not_a_cargo_toml("run", "foo/bar"); +} + +#[test] +fn run_dir_plus_path() { + assert_not_a_cargo_toml("run", "foo/bar/baz"); +} + +#[test] +fn run_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn rustc_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("rustc", "foo"); +} + +#[test] +fn rustc_dir_plus_file() { + assert_not_a_cargo_toml("rustc", "foo/bar"); +} + +#[test] +fn rustc_dir_plus_path() { + assert_not_a_cargo_toml("rustc", "foo/bar/baz"); +} + +#[test] +fn rustc_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn test_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("test", "foo"); +} + +#[test] +fn test_dir_plus_file() { + assert_not_a_cargo_toml("test", "foo/bar"); +} + +#[test] +fn test_dir_plus_path() { + assert_not_a_cargo_toml("test", "foo/bar/baz"); +} + +#[test] +fn test_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn update_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("update", "foo"); +} + +#[test] +fn update_dir_plus_file() { + assert_not_a_cargo_toml("update", "foo/bar"); +} + +#[test] +fn update_dir_plus_path() { + assert_not_a_cargo_toml("update", "foo/bar/baz"); +} + +#[test] +fn 
update_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("update", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn verify_project_dir_containing_cargo_toml() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("verify-project") + .arg("--manifest-path").arg("foo") + .cwd(p.root().parent().unwrap()), + execs().with_status(1) + .with_stdout("\ +{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ")); +} + +#[test] +fn verify_project_dir_plus_file() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("verify-project") + .arg("--manifest-path").arg("foo/bar") + .cwd(p.root().parent().unwrap()), + execs().with_status(1) + .with_stdout("\ +{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ")); +} + +#[test] +fn verify_project_dir_plus_path() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("verify-project") + .arg("--manifest-path").arg("foo/bar/baz") + .cwd(p.root().parent().unwrap()), + execs().with_status(1) + .with_stdout("\ +{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ")); +} + +#[test] +fn verify_project_dir_to_nonexistent_cargo_toml() { + let p = project("foo").build(); + assert_that(p.cargo("verify-project") + .arg("--manifest-path").arg("foo/bar/baz/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs().with_status(1) + .with_stdout("\ +{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\ + ")); +} diff --git a/collector/compile-benchmarks/cargo/tests/bench.rs b/collector/compile-benchmarks/cargo/tests/bench.rs new file mode 100644 index 000000000..30cebde21 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/bench.rs @@ -0,0 +1,1335 @@ +extern crate cargotest; +extern crate cargo; +extern crate hamcrest; + +use std::str; + +use cargo::util::process; +use cargotest::is_nightly; +use cargotest::support::paths::CargoPathExt; +use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest}; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn cargo_bench_simple() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "hello") + }"#) + .build(); + + assert_that(p.cargo("build"), execs()); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), + execs().with_stdout("hello\n")); + + assert_that(p.cargo("bench"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.5.0 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test bench_hello ... 
bench: [..]")); +} + +#[test] +fn bench_bench_implicit() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { } + fn main() { println!("Hello main!"); }"#) + .file("tests/other.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#) + .file("benches/mybench.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#) + .build(); + + assert_that(p.cargo("bench").arg("--benches"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]mybench-[..][EXE] +", dir = p.url())) + .with_stdout_contains("test run2 ... bench: [..]")); +} + +#[test] +fn bench_bin_implicit() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { } + fn main() { println!("Hello main!"); }"#) + .file("tests/other.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#) + .file("benches/mybench.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#) + .build(); + + assert_that(p.cargo("bench").arg("--bins"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +", dir = p.url())) + .with_stdout_contains("test run1 ... bench: [..]")); +} + +#[test] +fn bench_tarname() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("benches/bin1.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#) + .file("benches/bin2.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#) + .build(); + + assert_that(p.cargo("bench").arg("--bench").arg("bin2"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]bin2-[..][EXE] +", dir = p.url())) + .with_stdout_contains("test run2 ... bench: [..]")); +} + +#[test] +fn bench_multiple_targets() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("benches/bin1.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#) + .file("benches/bin2.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#) + .file("benches/bin3.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#) + .build(); + + assert_that(p.cargo("bench") + .arg("--bench").arg("bin1") + .arg("--bench").arg("bin2"), + execs() + .with_status(0) + .with_stdout_contains("test run1 ... bench: [..]") + .with_stdout_contains("test run2 ... 
bench: [..]") + .with_stdout_does_not_contain("run3")); +} + +#[test] +fn cargo_bench_verbose() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn main() {} + #[bench] fn bench_hello(_b: &mut test::Bencher) {} + "#) + .build(); + + assert_that(p.cargo("bench").arg("-v").arg("hello"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.5.0 ({url}) +[RUNNING] `rustc [..] src[/]main.rs [..]` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] hello --bench`", url = p.url())) + .with_stdout_contains("test bench_hello ... bench: [..]")); +} + +#[test] +fn many_similar_names() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + #![feature(test)] + #[cfg(test)] + extern crate test; + pub fn foo() {} + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + ") + .file("src/main.rs", " + #![feature(test)] + #[cfg(test)] + extern crate foo; + #[cfg(test)] + extern crate test; + fn main() {} + #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() } + ") + .file("benches/foo.rs", r#" + #![feature(test)] + extern crate foo; + extern crate test; + #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() } + "#) + .build(); + + let output = p.cargo("bench").exec_with_output().unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + assert!(output.contains("test bin_bench"), "bin_bench missing\n{}", output); + assert!(output.contains("test lib_bench"), "lib_bench missing\n{}", output); + assert!(output.contains("test bench_bench"), "bench_bench missing\n{}", output); +} + +#[test] +fn cargo_bench_failing_test() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "nope") + }"#) + .build(); + + assert_that(p.cargo("build"), execs()); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), + execs().with_stdout("hello\n")); + + assert_that(p.cargo("bench"), + execs().with_stdout_contains("test bench_hello ... ") + .with_stderr_contains(format!("\ +[COMPILING] foo v0.5.0 ({}) +[FINISHED] release [optimized] target(s) in [..] 
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +thread '[..]' panicked at 'assertion failed: \ + `(left == right)`[..]", p.url())) + .with_stderr_contains("[..]left: `\"hello\"`[..]") + .with_stderr_contains("[..]right: `\"nope\"`[..]") + .with_stderr_contains("[..]src[/]main.rs:15[..]") + .with_status(101)); +} + +#[test] +fn bench_with_lib_dep() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "baz" + path = "src/main.rs" + "#) + .file("src/lib.rs", r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + /// + /// ```rust + /// extern crate foo; + /// fn main() { + /// println!("{}", foo::foo()); + /// } + /// ``` + /// + pub fn foo(){} + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + "#) + .file("src/main.rs", " + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + #[cfg(test)] + extern crate test; + + fn main() {} + + #[bench] + fn bin_bench(_b: &mut test::Bencher) {} + ") + .build(); + + assert_that(p.cargo("bench"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]baz-[..][EXE]", p.url())) + .with_stdout_contains("test lib_bench ... bench: [..]") + .with_stdout_contains("test bin_bench ... bench: [..]")); +} + +#[test] +fn bench_with_deep_lib_dep() { + if !is_nightly() { return } + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = "../foo" + "#) + .file("src/lib.rs", " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate foo; + #[cfg(test)] + extern crate test; + #[bench] + fn bar_bench(_b: &mut test::Bencher) { + foo::foo(); + } + ") + .build(); + let _p2 = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + pub fn foo() {} + + #[bench] + fn foo_bench(_b: &mut test::Bencher) {} + ") + .build(); + + assert_that(p.cargo("bench"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ([..]) +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]", dir = p.url())) + .with_stdout_contains("test bar_bench ... bench: [..]")); +} + +#[test] +fn external_bench_explicit() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bench]] + name = "bench" + path = "src/bench.rs" + "#) + .file("src/lib.rs", r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + pub fn get_hello() -> &'static str { "Hello" } + + #[bench] + fn internal_bench(_b: &mut test::Bencher) {} + "#) + .file("src/bench.rs", r#" + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + "#) + .build(); + + assert_that(p.cargo("bench"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", p.url())) + .with_stdout_contains("test internal_bench ... 
bench: [..]") + .with_stdout_contains("test external_bench ... bench: [..]")); +} + +#[test] +fn external_bench_implicit() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + pub fn get_hello() -> &'static str { "Hello" } + + #[bench] + fn internal_bench(_b: &mut test::Bencher) {} + "#) + .file("benches/external.rs", r#" + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + "#) + .build(); + + assert_that(p.cargo("bench"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]external-[..][EXE]", p.url())) + .with_stdout_contains("test internal_bench ... bench: [..]") + .with_stdout_contains("test external_bench ... bench: [..]")); +} + +#[test] +fn dont_run_examples() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r"") + .file("examples/dont-run-me-i-will-fail.rs", r#" + fn main() { panic!("Examples should not be run by 'cargo test'"); } + "#) + .build(); + assert_that(p.cargo("bench"), + execs().with_status(0)); +} + +#[test] +fn pass_through_command_line() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] fn foo(_b: &mut test::Bencher) {} + #[bench] fn bar(_b: &mut test::Bencher) {} + ") + .build(); + + assert_that(p.cargo("bench").arg("bar"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", dir = p.url())) + .with_stdout_contains("test bar ... bench: [..]")); + + assert_that(p.cargo("bench").arg("foo"), + execs().with_status(0) + .with_stderr("[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test foo ... 
bench: [..]")); +} + +// Regression test for running cargo-bench twice with +// tests in an rlib +#[test] +fn cargo_bench_twice() { + if !is_nightly() { return } + + let p = project("test_twice") + .file("Cargo.toml", &basic_lib_manifest("test_twice")) + .file("src/test_twice.rs", r#" + #![crate_type = "rlib"] + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] + fn dummy_bench(b: &mut test::Bencher) { } + "#) + .build(); + + p.cargo("build"); + + for _ in 0..2 { + assert_that(p.cargo("bench"), + execs().with_status(0)); + } +} + +#[test] +fn lib_bin_same_name() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + [[bin]] + name = "foo" + "#) + .file("src/lib.rs", " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + ") + .file("src/main.rs", " + #![cfg_attr(test, feature(test))] + #[allow(unused_extern_crates)] + extern crate foo; + #[cfg(test)] + extern crate test; + + #[bench] + fn bin_bench(_b: &mut test::Bencher) {} + ") + .build(); + + assert_that(p.cargo("bench"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains_n("test [..] ... bench: [..]", 2)); +} + +#[test] +fn lib_with_standard_name() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + /// ``` + /// syntax::foo(); + /// ``` + pub fn foo() {} + + #[bench] + fn foo_bench(_b: &mut test::Bencher) {} + ") + .file("benches/bench.rs", " + #![feature(test)] + extern crate syntax; + extern crate test; + + #[bench] + fn bench(_b: &mut test::Bencher) { syntax::foo() } + ") + .build(); + + assert_that(p.cargo("bench"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE] +[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", dir = p.url())) + .with_stdout_contains("test foo_bench ... bench: [..]") + .with_stdout_contains("test bench ... bench: [..]")); +} + +#[test] +fn lib_with_standard_name2() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + name = "syntax" + bench = false + doctest = false + "#) + .file("src/lib.rs", " + pub fn foo() {} + ") + .file("src/main.rs", " + #![feature(test)] + #[cfg(test)] + extern crate syntax; + #[cfg(test)] + extern crate test; + + fn main() {} + + #[bench] + fn bench(_b: &mut test::Bencher) { syntax::foo() } + ") + .build(); + + assert_that(p.cargo("bench"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]", dir = p.url())) + .with_stdout_contains("test bench ... 
bench: [..]")); +} + +#[test] +fn bench_dylib() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", r#" + #![cfg_attr(test, feature(test))] + extern crate bar as the_bar; + #[cfg(test)] + extern crate test; + + pub fn bar() { the_bar::baz(); } + + #[bench] + fn foo(_b: &mut test::Bencher) {} + "#) + .file("benches/bench.rs", r#" + #![feature(test)] + extern crate foo as the_foo; + extern crate test; + + #[bench] + fn foo(_b: &mut test::Bencher) { the_foo::bar(); } + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#) + .file("bar/src/lib.rs", " + pub fn baz() {} + ") + .build(); + + assert_that(p.cargo("bench").arg("-v"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[RUNNING] [..] -C opt-level=3 [..] +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench` +[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", dir = p.url())) + .with_stdout_contains_n("test foo ... bench: [..]", 2)); + + p.root().move_into_the_past(); + assert_that(p.cargo("bench").arg("-v"), + execs().with_status(0) + .with_stderr(&format!("\ +[FRESH] bar v0.0.1 ({dir}/bar) +[FRESH] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench` +[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", dir = p.url())) + .with_stdout_contains_n("test foo ... bench: [..]", 2)); +} + +#[test] +fn bench_twice_with_build_cmd() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", " + #![feature(test)] + #[cfg(test)] + extern crate test; + #[bench] + fn foo(_b: &mut test::Bencher) {} + ") + .build(); + + assert_that(p.cargo("bench"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", dir = p.url())) + .with_stdout_contains("test foo ... bench: [..]")); + + assert_that(p.cargo("bench"), + execs().with_status(0) + .with_stderr("[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test foo ... 
bench: [..]")); +} + +#[test] +fn bench_with_examples() { + if !is_nightly() { return } + + let p = project("testbench") + .file("Cargo.toml", r#" + [package] + name = "testbench" + version = "6.6.6" + authors = [] + + [[example]] + name = "teste1" + + [[bench]] + name = "testb1" + "#) + .file("src/lib.rs", r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[cfg(test)] + use test::Bencher; + + pub fn f1() { + println!("f1"); + } + + pub fn f2() {} + + #[bench] + fn bench_bench1(_b: &mut Bencher) { + f2(); + } + "#) + .file("benches/testb1.rs", " + #![feature(test)] + extern crate testbench; + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bench2(_b: &mut Bencher) { + testbench::f2(); + } + ") + .file("examples/teste1.rs", r#" + extern crate testbench; + + fn main() { + println!("example1"); + testbench::f1(); + } + "#) + .build(); + + assert_that(p.cargo("bench").arg("-v"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] testbench v6.6.6 ({url}) +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `{dir}[/]target[/]release[/]deps[/]testbench-[..][EXE] --bench` +[RUNNING] `{dir}[/]target[/]release[/]deps[/]testb1-[..][EXE] --bench`", + dir = p.root().display(), url = p.url())) + .with_stdout_contains("test bench_bench1 ... bench: [..]") + .with_stdout_contains("test bench_bench2 ... bench: [..]")); +} + +#[test] +fn test_a_bench() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [lib] + name = "foo" + test = false + doctest = false + + [[bench]] + name = "b" + test = true + "#) + .file("src/lib.rs", "") + .file("benches/b.rs", r#" + #[test] + fn foo() {} + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]") + .with_stdout_contains("test foo ... ok")); +} + +#[test] +fn test_bench_no_run() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("benches/bbaz.rs", r#" + #![feature(test)] + + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_baz(_: &mut Bencher) {} + "#) + .build(); + + assert_that(p.cargo("bench").arg("--no-run"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+")); +} + +#[test] +fn test_bench_no_fail_fast() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "hello") + } + + #[bench] + fn bench_nope(_b: &mut test::Bencher) { + assert_eq!("nope", hello()) + }"#) + .build(); + + assert_that(p.cargo("bench").arg("--no-fail-fast"), + execs().with_status(101) + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("running 2 tests") + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test bench_hello [..]") + .with_stdout_contains("test bench_nope [..]")); +} + +#[test] +fn test_bench_multiple_packages() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#) + .file("src/lib.rs", "") + .build(); + + let _bar = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + authors = [] + version = "0.1.0" + + [[bench]] + name = "bbar" + test = true + "#) + .file("src/lib.rs", "") + .file("benches/bbar.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_b: &mut Bencher) {} + "#) + .build(); + + let _baz = project("baz") + .file("Cargo.toml", r#" + [project] + name = "baz" + authors = [] + version = "0.1.0" + + [[bench]] + name = "bbaz" + test = true + "#) + .file("src/lib.rs", "") + .file("benches/bbaz.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_baz(_b: &mut Bencher) {} + "#) + .build(); + + + assert_that(p.cargo("bench").arg("-p").arg("bar").arg("-p").arg("baz"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]bbaz-[..][EXE]") + .with_stdout_contains("test bench_baz ... bench: [..]") + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]bbar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]")); +} + +#[test] +fn bench_all_workspace() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("benches/foo.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .file("bar/benches/bar.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#) + .build(); + + assert_that(p.cargo("bench") + .arg("--all"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]") + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test bench_foo ... 
bench: [..]")); +} + +#[test] +fn bench_all_exclude() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] + pub fn bar(b: &mut test::Bencher) { + b.iter(|| {}); + } + "#) + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.1.0" + "#) + .file("baz/src/lib.rs", r#" + #[test] + pub fn baz() { + break_the_build(); + } + "#) + .build(); + + assert_that(p.cargo("bench") + .arg("--all") + .arg("--exclude") + .arg("baz"), + execs().with_status(0) + .with_stdout_contains("\ +running 1 test +test bar ... bench: [..] ns/iter (+/- [..])")); +} + +#[test] +fn bench_all_virtual_manifest() { + if !is_nightly() { return } + + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .file("foo/benches/foo.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .file("bar/benches/bar.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#) + .build(); + + // The order in which foo and bar are built is not guaranteed + assert_that(p.cargo("bench") + .arg("--all"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]") + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test bench_foo ... 
bench: [..]")); +} + +// https://github.com/rust-lang/cargo/issues/4287 +#[test] +fn legacy_bench_name() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [[bench]] + name = "bench" + "#) + .file("src/lib.rs", r#" + pub fn foo() {} + "#) + .file("src/bench.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#) + .build(); + + assert_that(p.cargo("bench"), execs().with_status(0).with_stderr_contains("\ +[WARNING] path `[..]src[/]bench.rs` was erroneously implicitly accepted for benchmark `bench`, +please set bench.path in Cargo.toml")); +} + +#[test] +fn bench_virtual_manifest_all_implied() { + if !is_nightly() { return } + + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .file("foo/benches/foo.rs", r#" + #![feature(test)] + extern crate test; + use test::Bencher; + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .file("bar/benches/bar.rs", r#" + #![feature(test)] + extern crate test; + use test::Bencher; + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#) + .build(); + + // The order in which foo and bar are built is not guaranteed + + assert_that(p.cargo("bench"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]") + .with_stderr_contains("\ +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test bench_foo ... 
bench: [..]")); +} diff --git a/collector/compile-benchmarks/cargo/tests/build-auth.rs b/collector/compile-benchmarks/cargo/tests/build-auth.rs new file mode 100644 index 000000000..064fce4fd --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/build-auth.rs @@ -0,0 +1,213 @@ +extern crate bufstream; +extern crate git2; +extern crate cargotest; +extern crate hamcrest; + +use std::collections::HashSet; +use std::io::prelude::*; +use std::net::TcpListener; +use std::thread; + +use bufstream::BufStream; +use cargotest::support::paths; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +// Test that HTTP auth is offered from `credential.helper` +#[test] +fn http_auth_offered() { + let server = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = server.local_addr().unwrap(); + + fn headers(rdr: &mut BufRead) -> HashSet { + let valid = ["GET", "Authorization", "Accept", "User-Agent"]; + rdr.lines().map(|s| s.unwrap()) + .take_while(|s| s.len() > 2) + .map(|s| s.trim().to_string()) + .filter(|s| { + valid.iter().any(|prefix| s.starts_with(*prefix)) + }) + .collect() + } + + let t = thread::spawn(move|| { + let mut conn = BufStream::new(server.accept().unwrap().0); + let req = headers(&mut conn); + let user_agent = if cfg!(windows) { + "User-Agent: git/1.0 (libgit2 0.26.0)" + } else { + "User-Agent: git/2.0 (libgit2 0.26.0)" + }; + conn.write_all(b"\ + HTTP/1.1 401 Unauthorized\r\n\ + WWW-Authenticate: Basic realm=\"wheee\"\r\n + \r\n\ + ").unwrap(); + assert_eq!(req, vec![ + "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", + "Accept: */*", + user_agent, + ].into_iter().map(|s| s.to_string()).collect()); + drop(conn); + + let mut conn = BufStream::new(server.accept().unwrap().0); + let req = headers(&mut conn); + conn.write_all(b"\ + HTTP/1.1 401 Unauthorized\r\n\ + WWW-Authenticate: Basic realm=\"wheee\"\r\n + \r\n\ + ").unwrap(); + assert_eq!(req, vec![ + "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", + "Authorization: Basic Zm9vOmJhcg==", + "Accept: */*", + user_agent, + ].into_iter().map(|s| s.to_string()).collect()); + }); + + let script = project("script") + .file("Cargo.toml", r#" + [project] + name = "script" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { + println!("username=foo"); + println!("password=bar"); + } + "#) + .build(); + + assert_that(script.cargo("build").arg("-v"), + execs().with_status(0)); + let script = script.bin("script"); + + let config = paths::home().join(".gitconfig"); + let mut config = git2::Config::open(&config).unwrap(); + config.set_str("credential.helper", + &script.display().to_string()).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "http://127.0.0.1:{}/foo/bar" + "#, addr.port())) + .file("src/main.rs", "") + .file(".cargo/config","\ + [net] + retry = 0 + ") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr(&format!("\ +[UPDATING] git repository `http://{addr}/foo/bar` +[ERROR] failed to load source for a dependency on `bar` + +Caused by: + Unable to update http://{addr}/foo/bar + +Caused by: + failed to clone into: [..] + +Caused by: + failed to authenticate when downloading repository +attempted to find username/password via `credential.helper`, but [..] + +To learn more, run the command again with --verbose. 
+", + addr = addr))); + + t.join().ok().unwrap(); +} + +// Boy, sure would be nice to have a TLS implementation in rust! +#[test] +fn https_something_happens() { + let server = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = server.local_addr().unwrap(); + let t = thread::spawn(move|| { + let mut conn = server.accept().unwrap().0; + drop(conn.write(b"1234")); + drop(conn.shutdown(std::net::Shutdown::Write)); + drop(conn.read(&mut [0; 16])); + }); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1:{}/foo/bar" + "#, addr.port())) + .file("src/main.rs", "") + .file(".cargo/config","\ + [net] + retry = 0 + ") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr_contains(&format!("\ +[UPDATING] git repository `https://{addr}/foo/bar` +", addr = addr)) + .with_stderr_contains(&format!("\ +Caused by: + {errmsg} +", + errmsg = if cfg!(windows) { + "[[..]] failed to send request: [..]\n" + } else if cfg!(target_os = "macos") { + // OSX is difficult to tests as some builds may use + // Security.framework and others may use OpenSSL. In that case let's + // just not verify the error message here. + "[..]" + } else { + "[..] SSL error: [..]" + }))); + + t.join().ok().unwrap(); +} + +// Boy, sure would be nice to have an SSH implementation in rust! +#[test] +fn ssh_something_happens() { + let server = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = server.local_addr().unwrap(); + let t = thread::spawn(move|| { + drop(server.accept().unwrap()); + }); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "ssh://127.0.0.1:{}/foo/bar" + "#, addr.port())) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr_contains(&format!("\ +[UPDATING] git repository `ssh://{addr}/foo/bar` +", addr = addr)) + .with_stderr_contains("\ +Caused by: + [[..]] failed to start SSH session: Failed getting banner +")); + t.join().ok().unwrap(); +} diff --git a/collector/compile-benchmarks/cargo/tests/build-lib.rs b/collector/compile-benchmarks/cargo/tests/build-lib.rs new file mode 100644 index 000000000..682b7c7e8 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/build-lib.rs @@ -0,0 +1,88 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::{basic_bin_manifest, execs, project, Project}; +use hamcrest::{assert_that}; + +fn verbose_output_for_lib(p: &Project) -> String { + format!("\ +[COMPILING] {name} v{version} ({url}) +[RUNNING] `rustc --crate-name {name} src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.root().display(), url = p.url(), + name = "foo", version = "0.0.1") +} + +#[test] +fn build_lib_only() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["wycats@example.com"] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("build").arg("--lib").arg("-v"), + execs() + .with_status(0) + .with_stderr(verbose_output_for_lib(&p))); +} + + +#[test] +fn build_with_no_lib() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + + assert_that(p.cargo("build").arg("--lib"), + execs().with_status(101) + .with_stderr("[ERROR] no library targets found")); +} + +#[test] +fn build_with_relative_cargo_home_path() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["wycats@example.com"] + + [dependencies] + + "test-dependency" = { path = "src/test_dependency" } + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("src/test_dependency/src/lib.rs", r#" "#) + .file("src/test_dependency/Cargo.toml", r#" + [package] + + name = "test-dependency" + version = "0.0.1" + authors = ["wycats@example.com"] + "#) + .build(); + + assert_that(p.cargo("build").env("CARGO_HOME", "./cargo_home/"), + execs() + .with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/build-script-env.rs b/collector/compile-benchmarks/cargo/tests/build-script-env.rs new file mode 100644 index 000000000..610301cfb --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/build-script-env.rs @@ -0,0 +1,106 @@ +extern crate cargotest; +extern crate hamcrest; + +use std::fs::File; + +use cargotest::sleep_ms; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn rerun_if_env_changes() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.5.0" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rerun-if-env-changed=FOO"); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +")); + assert_that(p.cargo("build").env("FOO", "bar"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +")); + assert_that(p.cargo("build").env("FOO", "baz"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +")); + assert_that(p.cargo("build").env("FOO", "baz"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] [..] +")); + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +")); +} + +#[test] +fn rerun_if_env_or_file_changes() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.5.0" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rerun-if-env-changed=FOO"); + println!("cargo:rerun-if-changed=foo"); + } + "#) + .file("foo", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +")); + assert_that(p.cargo("build").env("FOO", "bar"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] 
+")); + assert_that(p.cargo("build").env("FOO", "bar"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] [..] +")); + sleep_ms(1000); + File::create(p.root().join("foo")).unwrap(); + assert_that(p.cargo("build").env("FOO", "bar"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/build-script.rs b/collector/compile-benchmarks/cargo/tests/build-script.rs new file mode 100644 index 000000000..0748b44ca --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/build-script.rs @@ -0,0 +1,2734 @@ +extern crate cargotest; +extern crate hamcrest; + +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargotest::{rustc_host, sleep_ms}; +use cargotest::support::{project, execs}; +use cargotest::support::paths::CargoPathExt; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_file, existing_dir}; + +#[test] +fn custom_build_script_failed() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("build.rs", r#" + fn main() { + std::process::exit(101); + } + "#) + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101) + .with_stderr(&format!("\ +[COMPILING] foo v0.5.0 ({url}) +[RUNNING] `rustc --crate-name build_script_build build.rs --crate-type bin [..]` +[RUNNING] `[..][/]build-script-build` +[ERROR] failed to run custom build command for `foo v0.5.0 ({url})` +process didn't exit successfully: `[..][/]build-script-build` (exit code: 101)", +url = p.url()))); +} + +#[test] +fn custom_build_env_vars() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [features] + bar_feat = ["bar/foo"] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [features] + foo = [] + "#) + .file("bar/src/lib.rs", r#" + pub fn hello() {} + "#); + + let file_content = format!(r#" + use std::env; + use std::io::prelude::*; + use std::path::Path; + use std::fs; + + fn main() {{ + let _target = env::var("TARGET").unwrap(); + let _ncpus = env::var("NUM_JOBS").unwrap(); + let _dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + + let opt = env::var("OPT_LEVEL").unwrap(); + assert_eq!(opt, "0"); + + let opt = env::var("PROFILE").unwrap(); + assert_eq!(opt, "debug"); + + let debug = env::var("DEBUG").unwrap(); + assert_eq!(debug, "true"); + + let out = env::var("OUT_DIR").unwrap(); + assert!(out.starts_with(r"{0}")); + assert!(fs::metadata(&out).map(|m| m.is_dir()).unwrap_or(false)); + + let _host = env::var("HOST").unwrap(); + + let _feat = env::var("CARGO_FEATURE_FOO").unwrap(); + + let rustc = env::var("RUSTC").unwrap(); + assert_eq!(rustc, "rustc"); + + let rustdoc = env::var("RUSTDOC").unwrap(); + assert_eq!(rustdoc, "rustdoc"); + }} + "#, + p.root().join("target").join("debug").join("build").display()); + + let p = p.file("bar/build.rs", &file_content).build(); + + assert_that(p.cargo("build").arg("--features").arg("bar_feat"), + execs().with_status(0)); +} + +#[test] +fn custom_build_script_wrong_rustc_flags() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = 
["wycats@example.com"] + build = "build.rs" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-aaa -bbb"); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr_contains(&format!("\ +[ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ({})`: \ +`-aaa -bbb`", +p.url()))); +} + +/* +#[test] +fn custom_build_script_rustc_flags() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.foo] + path = "foo" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("foo/Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#) + .file("foo/src/lib.rs", r#" + "#) + .file("foo/build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2"); + } + "#) + .build(); + + // TODO: TEST FAILS BECAUSE OF WRONG STDOUT (but otherwise, the build works) + assert_that(p.cargo("build").arg("--verbose"), + execs().with_status(101) + .with_stderr(&format!("\ +[COMPILING] bar v0.5.0 ({url}) +[RUNNING] `rustc --crate-name test {dir}{sep}src{sep}lib.rs --crate-type lib -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=-[..] \ + --out-dir {dir}{sep}target \ + --emit=dep-info,link \ + -L {dir}{sep}target \ + -L {dir}{sep}target{sep}deps` +", sep = path::SEP, +dir = p.root().display(), +url = p.url(), +))); +} +*/ + +#[test] +fn links_no_build_cmd() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[ERROR] package `foo v0.5.0 (file://[..])` specifies that it links to `a` but does \ +not have a custom build script +")); +} + +#[test] +fn links_duplicates() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + + [dependencies.a-sys] + path = "a-sys" + "#) + .file("src/lib.rs", "") + .file("build.rs", "") + .file("a-sys/Cargo.toml", r#" + [project] + name = "a-sys" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#) + .file("a-sys/src/lib.rs", "") + .file("a-sys/build.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[ERROR] Multiple packages link to native library `a`. A native library can be \ +linked only once. + +The root-package links to native library `a`. + +Package `a-sys v0.5.0 (file://[..])` + ... which is depended on by `foo v0.5.0 (file://[..])` +also links to native library `a`. 
+")); +} + +#[test] +fn links_duplicates_deep_dependency() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("build.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a-sys] + path = "a-sys" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", "") + .file("a/a-sys/Cargo.toml", r#" + [project] + name = "a-sys" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#) + .file("a/a-sys/src/lib.rs", "") + .file("a/a-sys/build.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[ERROR] Multiple packages link to native library `a`. A native library can be \ +linked only once. + +The root-package links to native library `a`. + +Package `a-sys v0.5.0 (file://[..])` + ... which is depended on by `a v0.5.0 (file://[..])` + ... which is depended on by `foo v0.5.0 (file://[..])` +also links to native library `a`. +")); +} + +#[test] +fn overrides_and_links() { + let target = rustc_host(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"), + "bar"); + assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"), + "baz"); + } + "#) + .file(".cargo/config", &format!(r#" + [target.{}.foo] + rustc-flags = "-L foo -L bar" + foo = "bar" + bar = "baz" + "#, target)) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", "not valid rust code") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[..] +[..] +[..] +[..] +[..] +[RUNNING] `rustc --crate-name foo [..] -L foo -L bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn unused_overrides() { + let target = rustc_host(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file(".cargo/config", &format!(r#" + [target.{}.foo] + rustc-flags = "-L foo -L bar" + foo = "bar" + bar = "baz" + "#, target)) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn links_passes_env_vars() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); + assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); + } + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", r#" + use std::env; + fn main() { + let lib = env::var("CARGO_MANIFEST_LINKS").unwrap(); + assert_eq!(lib, "foo"); + + println!("cargo:foo=bar"); + println!("cargo:bar=baz"); + } + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn only_rerun_build_script() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() {} + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + p.root().move_into_the_past(); + + File::create(&p.root().join("some-new-file")).unwrap(); + p.root().move_into_the_past(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn rebuild_continues_to_pass_env_vars() { + let a = project("a") + .file("Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::time::Duration; + fn main() { + println!("cargo:foo=bar"); + println!("cargo:bar=baz"); + std::thread::sleep(Duration::from_millis(500)); + } + "#) + .build(); + a.root().move_into_the_past(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = '{}' + "#, a.root().display())) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); + assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); + } + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + p.root().move_into_the_past(); + + File::create(&p.root().join("some-new-file")).unwrap(); + p.root().move_into_the_past(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn testing_and_such() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() {} + "#) + .build(); + + println!("build"); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + p.root().move_into_the_past(); + + File::create(&p.root().join("src/lib.rs")).unwrap(); + p.root().move_into_the_past(); + + println!("test"); + assert_that(p.cargo("test").arg("-vj1"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..]` +[RUNNING] `rustc --crate-name foo [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]`") + .with_stdout_contains_n("running 0 tests", 2)); + + println!("doc"); + assert_that(p.cargo("doc").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[DOCUMENTING] foo v0.5.0 (file://[..]) +[RUNNING] `rustdoc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + File::create(&p.root().join("src/main.rs")).unwrap() + .write_all(b"fn main() {}").unwrap(); + println!("run"); + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]foo[EXE]` +")); +} + +#[test] +fn propagation_of_l_flags() { + let target = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + + [dependencies.b] + path = "../b" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-L bar"); + } + "#) + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("b/src/lib.rs", "") + .file("b/build.rs", "bad file") + .file(".cargo/config", &format!(r#" + [target.{}.foo] + rustc-flags = "-L foo" + "#, target)) + .build(); + + assert_that(p.cargo("build").arg("-v").arg("-j1"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name foo [..] -L bar -L foo` +")); +} + +#[test] +fn propagation_of_l_flags_new() { + let target = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + + [dependencies.b] + path = "../b" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=bar"); + } + "#) + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("b/src/lib.rs", "") + .file("b/build.rs", "bad file") + .file(".cargo/config", &format!(r#" + [target.{}.foo] + rustc-link-search = ["foo"] + "#, target)) + .build(); + + assert_that(p.cargo("build").arg("-v").arg("-j1"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name foo [..] -L bar -L foo` +")); +} + +#[test] +fn build_deps_simple() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + [build-dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("build.rs", " + #[allow(unused_extern_crates)] + extern crate a; + fn main() {} + ") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] a v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name a [..]` +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc [..] build.rs [..] --extern a=[..]` +[RUNNING] `[..][/]foo-[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn build_deps_not_for_normal() { + let target = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + [build-dependencies.aaaaa] + path = "a" + "#) + .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate aaaaa;") + .file("build.rs", " + #[allow(unused_extern_crates)] + extern crate aaaaa; + fn main() {} + ") + .file("a/Cargo.toml", r#" + [project] + name = "aaaaa" + version = "0.5.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v").arg("--target").arg(&target), + execs().with_status(101) + .with_stderr_contains("\ +[..]can't find crate for `aaaaa`[..] +") + .with_stderr_contains("\ +[ERROR] Could not compile `foo`. + +Caused by: + process didn't exit successfully: [..] +")); +} + +#[test] +fn build_cmd_with_a_build_cmd() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("build.rs", " + #[allow(unused_extern_crates)] + extern crate a; + fn main() {} + ") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies.b] + path = "../b" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", "#[allow(unused_extern_crates)] extern crate b; fn main() {}") + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] b v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name b [..]` +[COMPILING] a v0.5.0 (file://[..]) +[RUNNING] `rustc [..] a[/]build.rs [..] --extern b=[..]` +[RUNNING] `[..][/]a-[..][/]build-script-build` +[RUNNING] `rustc --crate-name a [..]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..]target[/]debug[/]deps \ + -L [..]target[/]debug[/]deps` +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name build_script_build build.rs --crate-type bin \ + --emit=dep-info,link \ + -C debuginfo=2 -C metadata=[..] --out-dir [..] \ + -L [..]target[/]debug[/]deps \ + --extern a=[..]liba[..].rlib` +[RUNNING] `[..][/]foo-[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L [..]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn out_dir_is_preserved() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::env; + use std::fs::File; + use std::path::Path; + fn main() { + let out = env::var("OUT_DIR").unwrap(); + File::create(Path::new(&out).join("foo")).unwrap(); + } + "#) + .build(); + + // Make the file + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + p.root().move_into_the_past(); + + // Change to asserting that it's there + File::create(&p.root().join("build.rs")).unwrap().write_all(br#" + use std::env; + use std::old_io::File; + fn main() { + let out = env::var("OUT_DIR").unwrap(); + File::open(&Path::new(&out).join("foo")).unwrap(); + } + "#).unwrap(); + p.root().move_into_the_past(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + + // Run a fresh build where file should be preserved + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + + // One last time to make sure it's still there. + File::create(&p.root().join("foo")).unwrap(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn output_separate_lines() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-L foo"); + println!("cargo:rustc-flags=-l static=foo"); + } + "#) + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101) + .with_stderr_contains("\ +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..][/]foo-[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo` +[ERROR] could not find native static library [..] +")); +} + +#[test] +fn output_separate_lines_new() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=foo"); + println!("cargo:rustc-link-lib=static=foo"); + } + "#) + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101) + .with_stderr_contains("\ +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..][/]foo-[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo` +[ERROR] could not find native static library [..] +")); +} + +#[cfg(not(windows))] // FIXME(#867) +#[test] +fn code_generation() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/main.rs", r#" + include!(concat!(env!("OUT_DIR"), "/hello.rs")); + + fn main() { + println!("{}", message()); + } + "#) + .file("build.rs", r#" + use std::env; + use std::fs::File; + use std::io::prelude::*; + use std::path::PathBuf; + + fn main() { + let dst = PathBuf::from(env::var("OUT_DIR").unwrap()); + let mut f = File::create(&dst.join("hello.rs")).unwrap(); + f.write_all(b" + pub fn message() -> &'static str { + \"Hello, World!\" + } + ").unwrap(); + } + "#) + .build(); + + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]foo`") + .with_stdout("\ +Hello, World! +")); + + assert_that(p.cargo("test"), + execs().with_status(0)); +} + +#[test] +fn release_with_build_script() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() {} + "#) + .build(); + + assert_that(p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0)); +} + +#[test] +fn build_script_only() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", r#"fn main() {}"#) + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + no targets specified in the manifest + either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present")); +} + +#[test] +fn shared_dep_with_a_build_script() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + + [build-dependencies.b] + path = "b" + "#) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.5.0" + authors = [] + + [dependencies.a] + path = "../a" + "#) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn transitive_dep_host() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies.b] + path = "b" + "#) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.5.0" + authors = [] + + [lib] + name = "b" + plugin = true + + [dependencies.a] + path = "../a" + "#) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn test_a_lib_with_a_build_command() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", r#" + include!(concat!(env!("OUT_DIR"), "/foo.rs")); + + /// ``` + /// foo::bar(); + /// ``` + pub fn bar() { + assert_eq!(foo(), 1); + } + "#) + .file("build.rs", r#" + use std::env; + use std::io::prelude::*; + use std::fs::File; + use std::path::PathBuf; + + fn main() { + let out = PathBuf::from(env::var("OUT_DIR").unwrap()); + File::create(out.join("foo.rs")).unwrap().write_all(b" + fn foo() -> i32 { 1 } + ").unwrap(); + } + "#) + .build(); + assert_that(p.cargo("test"), + execs().with_status(0)); +} + +#[test] +fn test_dev_dep_build_script() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = 
"build.rs" + "#) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn build_script_with_dynamic_native_dependency() { + + let _workspace = project("ws") + .file("Cargo.toml", r#" + [workspace] + members = ["builder", "foo"] + "#) + .build(); + + let build = project("ws/builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + + [lib] + name = "builder" + crate-type = ["dylib"] + plugin = true + "#) + .file("src/lib.rs", r#" + #[no_mangle] + pub extern fn foo() {} + "#) + .build(); + + let foo = project("ws/foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [build-dependencies.bar] + path = "bar" + "#) + .file("build.rs", r#" + extern crate bar; + fn main() { bar::bar() } + "#) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("bar/build.rs", r#" + use std::env; + use std::path::PathBuf; + + fn main() { + let src = PathBuf::from(env::var("SRC").unwrap()); + println!("cargo:rustc-link-search=native={}/target/debug/deps", + src.display()); + } + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() { + #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] + #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] + extern { fn foo(); } + unsafe { foo() } + } + "#) + .build(); + + assert_that(build.cargo("build").arg("-v") + .env("RUST_LOG", "cargo::ops::cargo_rustc"), + execs().with_status(0)); + + assert_that(foo.cargo("build").arg("-v").env("SRC", build.root()) + .env("RUST_LOG", "cargo::ops::cargo_rustc"), + execs().with_status(0)); +} + +#[test] +fn profile_and_opt_level_set_correctly() { + let build = project("builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::env; + + fn main() { + assert_eq!(env::var("OPT_LEVEL").unwrap(), "3"); + assert_eq!(env::var("PROFILE").unwrap(), "release"); + assert_eq!(env::var("DEBUG").unwrap(), "false"); + } + "#) + .build(); + assert_that(build.cargo("bench"), + execs().with_status(0)); +} + +#[test] +fn build_script_with_lto() { + let build = project("builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + + [profile.dev] + lto = true + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { + } + "#) + .build(); + assert_that(build.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn test_duplicate_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + build = "build.rs" + + [dependencies.bar] + path = "bar" + + [build-dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { bar::do_nothing() } + "#) + .file("build.rs", r#" + extern crate bar; + fn main() { bar::do_nothing() } + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn do_nothing() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn cfg_feedback() { + let build = project("builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + 
.file("src/main.rs", " + #[cfg(foo)] + fn main() {} + ") + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#) + .build(); + assert_that(build.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn cfg_override() { + let target = rustc_host(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#) + .file("src/main.rs", " + #[cfg(foo)] + fn main() {} + ") + .file("build.rs", "") + .file(".cargo/config", &format!(r#" + [target.{}.a] + rustc-cfg = ["foo"] + "#, target)) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn cfg_test() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#) + .file("src/lib.rs", r#" + /// + /// ``` + /// extern crate foo; + /// + /// fn main() { + /// foo::foo() + /// } + /// ``` + /// + #[cfg(foo)] + pub fn foo() {} + + #[cfg(foo)] + #[test] + fn test_foo() { + foo() + } + "#) + .file("tests/test.rs", r#" + #[cfg(foo)] + #[test] + fn test_bar() {} + "#) + .build(); + assert_that(p.cargo("test").arg("-v"), + execs().with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] [..] build.rs [..] +[RUNNING] `[..][/]build-script-build` +[RUNNING] [..] --cfg foo[..] +[RUNNING] [..] --cfg foo[..] +[RUNNING] [..] --cfg foo[..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]` +[RUNNING] `[..][/]test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --cfg foo[..]", dir = p.url())) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test_bar ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3)); +} + +#[test] +fn cfg_doc() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [dependencies.bar] + path = "bar" + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#) + .file("src/lib.rs", r#" + #[cfg(foo)] + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("bar/build.rs", r#" + fn main() { + println!("cargo:rustc-cfg=bar"); + } + "#) + .file("bar/src/lib.rs", r#" + #[cfg(bar)] + pub fn bar() {} + "#) + .build(); + assert_that(p.cargo("doc"), + execs().with_status(0)); + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file()); + assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file()); +} + +#[test] +fn cfg_override_test() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "a" + "#) + .file("build.rs", "") + .file(".cargo/config", &format!(r#" + [target.{}.a] + rustc-cfg = ["foo"] + "#, rustc_host())) + .file("src/lib.rs", r#" + /// + /// ``` + /// extern crate foo; + /// + /// fn main() { + /// foo::foo() + /// } + /// ``` + /// + #[cfg(foo)] + pub fn foo() {} + + #[cfg(foo)] + #[test] + fn test_foo() { + foo() + } + "#) + .file("tests/test.rs", r#" + #[cfg(foo)] + #[test] + fn test_bar() {} + "#) + .build(); + assert_that(p.cargo("test").arg("-v"), + execs().with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] `[..]` +[RUNNING] `[..]` +[RUNNING] `[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]` +[RUNNING] `[..][/]test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --cfg foo[..]", dir = p.url())) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test_bar ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3)); +} + +#[test] +fn cfg_override_doc() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "a" + + [dependencies.bar] + path = "bar" + "#) + .file(".cargo/config", &format!(r#" + [target.{target}.a] + rustc-cfg = ["foo"] + [target.{target}.b] + rustc-cfg = ["bar"] + "#, target = rustc_host())) + .file("build.rs", "") + .file("src/lib.rs", r#" + #[cfg(foo)] + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "b" + "#) + .file("bar/build.rs", "") + .file("bar/src/lib.rs", r#" + #[cfg(bar)] + pub fn bar() {} + "#) + .build(); + assert_that(p.cargo("doc"), + execs().with_status(0)); + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file()); + assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file()); +} + +#[test] +fn env_build() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("src/main.rs", r#" + const FOO: &'static str = env!("FOO"); + fn main() { + println!("{}", FOO); + } + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-env=FOO=foo"); + } + "#) + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + assert_that(p.cargo("run").arg("-v"), + execs().with_status(0).with_stdout("foo\n")); +} + +#[test] +fn env_test() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-env=FOO=foo"); + } + "#) + .file("src/lib.rs", r#" + pub const FOO: &'static str = env!("FOO"); + "#) + .file("tests/test.rs", r#" + extern crate foo; + + #[test] + fn test_foo() { + assert_eq!("foo", foo::FOO); + } + "#) + .build(); + assert_that(p.cargo("test").arg("-v"), + execs().with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] [..] build.rs [..] +[RUNNING] `[..][/]build-script-build` +[RUNNING] [..] --crate-name foo[..] +[RUNNING] [..] --crate-name foo[..] +[RUNNING] [..] --crate-name test[..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]` +[RUNNING] `[..][/]test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --crate-name foo[..]", dir = p.url())) + .with_stdout_contains_n("running 0 tests", 2) + .with_stdout_contains("test test_foo ... 
ok")); +} + +#[test] +fn env_doc() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("src/main.rs", r#" + const FOO: &'static str = env!("FOO"); + fn main() {} + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-env=FOO=foo"); + } + "#) + .build(); + assert_that(p.cargo("doc").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn flags_go_into_tests() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + b = { path = "b" } + "#) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "../a" } + "#) + .file("b/src/lib.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=test"); + } + "#) + .build(); + + assert_that(p.cargo("test").arg("-v").arg("--test=foo"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] a v0.5.0 ([..] +[RUNNING] `rustc [..] a[/]build.rs [..]` +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc [..] a[/]src[/]lib.rs [..] -L test[..]` +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..] b[/]src[/]lib.rs [..] -L test[..]` +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] src[/]lib.rs [..] -L test[..]` +[RUNNING] `rustc [..] tests[/]foo.rs [..] -L test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]`") + .with_stdout_contains("running 0 tests")); + + assert_that(p.cargo("test").arg("-v").arg("-pb").arg("--lib"), + execs().with_status(0) + .with_stderr("\ +[FRESH] a v0.5.0 ([..] +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..] b[/]src[/]lib.rs [..] -L test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]b-[..][EXE]`") + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn diamond_passes_args_only_once() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + [dependencies] + b = { path = "../b" } + c = { path = "../c" } + "#) + .file("a/src/lib.rs", "") + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + c = { path = "../c" } + "#) + .file("b/src/lib.rs", "") + .file("c/Cargo.toml", r#" + [project] + name = "c" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("c/build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=native=test"); + } + "#) + .file("c/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] c v0.5.0 ([..] +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..]` +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..]` +[COMPILING] a v0.5.0 ([..] +[RUNNING] `rustc [..]` +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `[..]rlib -L native=test` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn adding_an_override_invalidates() { + let target = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", "") + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=native=foo"); + } + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..] -L native=foo` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + File::create(p.root().join(".cargo/config")).unwrap().write_all(format!(" + [target.{}.foo] + rustc-link-search = [\"native=bar\"] + ", target).as_bytes()).unwrap(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn changing_an_override_invalidates() { + let target = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", &format!(" + [target.{}.foo] + rustc-link-search = [\"native=foo\"] + ", target)) + .file("build.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=foo` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + File::create(p.root().join(".cargo/config")).unwrap().write_all(format!(" + [target.{}.foo] + rustc-link-search = [\"native=bar\"] + ", target).as_bytes()).unwrap(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + + +#[test] +fn fresh_builds_possible_with_link_libs() { + // The bug is non-deterministic. Sometimes you can get a fresh build + let target = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "nativefoo" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", &format!(" + [target.{}.nativefoo] + rustc-link-lib = [\"a\"] + rustc-link-search = [\"./b\"] + rustc-flags = \"-l z -L ./\" + ", target)) + .file("build.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build") + .arg("-v") + .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info"), + execs().with_status(0).with_stderr("\ +[FRESH] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + + +#[test] +fn fresh_builds_possible_with_multiple_metadata_overrides() { + // The bug is non-deterministic. 
Sometimes you can get a fresh build + let target = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", &format!(" + [target.{}.foo] + a = \"\" + b = \"\" + c = \"\" + d = \"\" + e = \"\" + ", target)) + .file("build.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build") + .arg("-v") + .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info"), + execs().with_status(0).with_stderr("\ +[FRESH] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + + +#[test] +fn rebuild_only_on_explicit_paths() { + let p = project("a") + .file("Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { + println!("cargo:rerun-if-changed=foo"); + println!("cargo:rerun-if-changed=bar"); + } + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + + // files don't exist, so should always rerun if they don't exist + println!("run without"); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] a v0.5.0 ([..]) +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + sleep_ms(1000); + File::create(p.root().join("foo")).unwrap(); + File::create(p.root().join("bar")).unwrap(); + + // now the exist, so run once, catch the mtime, then shouldn't run again + println!("run with"); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] a v0.5.0 ([..]) +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + println!("run with2"); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[FRESH] a v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + sleep_ms(1000); + + // random other files do not affect freshness + println!("run baz"); + File::create(p.root().join("baz")).unwrap(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[FRESH] a v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + // but changing dependent files does + println!("run foo change"); + File::create(p.root().join("foo")).unwrap(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] a v0.5.0 ([..]) +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + // .. as does deleting a file + println!("run foo delete"); + fs::remove_file(p.root().join("bar")).unwrap(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] a v0.5.0 ([..]) +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + + +#[test] +fn doctest_recieves_build_link_args() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=native=bar"); + } + "#) + .build(); + + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] `rustdoc --test [..] --crate-name foo [..]-L native=bar[..]` +")); +} + +#[test] +fn please_respect_the_dag() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies] + a = { path = 'a' } + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=native=foo"); + } + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=native=bar"); + } + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] `rustc [..] -L native=foo -L native=bar[..]` +")); +} + +#[test] +fn non_utf8_output() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", r#" + use std::io::prelude::*; + + fn main() { + let mut out = std::io::stdout(); + // print something that's not utf8 + out.write_all(b"\xff\xff\n").unwrap(); + + // now print some cargo metadata that's utf8 + println!("cargo:rustc-cfg=foo"); + + // now print more non-utf8 + out.write_all(b"\xff\xff\n").unwrap(); + } + "#) + .file("src/main.rs", r#" + #[cfg(foo)] + fn main() {} + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn custom_target_dir() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [build] + target-dir = 'test' + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn panic_abort_with_build_scripts() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [profile.release] + panic = 'abort' + + [dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate a;") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies] + b = { path = "../b" } + "#) + .file("a/src/lib.rs", "") + .file("a/build.rs", "#[allow(unused_extern_crates)] extern crate b; fn main() {}") + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0)); +} + +#[test] +fn warnings_emitted() { + let 
p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +warning: foo +warning: bar +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn warnings_hidden_for_upstream() { + Package::new("bar", "0.1.0") + .file("build.rs", r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#) + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..]` +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn warnings_printed_on_vv() { + Package::new("bar", "0.1.0") + .file("build.rs", r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#) + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-vv"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +warning: foo +warning: bar +[RUNNING] `rustc [..]` +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn output_shows_on_vv() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::io::prelude::*; + + fn main() { + std::io::stderr().write_all(b"stderr\n").unwrap(); + std::io::stdout().write_all(b"stdout\n").unwrap(); + } + "#) + .build(); + + assert_that(p.cargo("build").arg("-vv"), + execs().with_status(0) + .with_stdout("\ +stdout +") + .with_stderr("\ +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +stderr +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn links_with_dots() { + let target = rustc_host(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + links = "a.b" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-link-search=bar") + } + "#) + .file(".cargo/config", &format!(r#" + [target.{}.'a.b'] + rustc-link-search = ["foo"] + "#, target)) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] `rustc --crate-name foo [..] [..] -L foo[..]` +")); +} + +#[test] +fn rustc_and_rustdoc_set_correctly() { + let p = project("builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::env; + + fn main() { + assert_eq!(env::var("RUSTC").unwrap(), "rustc"); + assert_eq!(env::var("RUSTDOC").unwrap(), "rustdoc"); + } + "#) + .build(); + assert_that(p.cargo("bench"), + execs().with_status(0)); +} + +#[test] +fn cfg_env_vars_available() { + let p = project("builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::env; + + fn main() { + let fam = env::var("CARGO_CFG_TARGET_FAMILY").unwrap(); + if cfg!(unix) { + assert_eq!(fam, "unix"); + } else { + assert_eq!(fam, "windows"); + } + } + "#) + .build(); + assert_that(p.cargo("bench"), + execs().with_status(0)); +} + +#[test] +fn switch_features_rerun() { + let p = project("builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + + [features] + foo = [] + "#) + .file("src/main.rs", r#" + fn main() { + println!(include_str!(concat!(env!("OUT_DIR"), "/output"))); + } + "#) + .file("build.rs", r#" + use std::env; + use std::fs::File; + use std::io::Write; + use std::path::Path; + + fn main() { + let out_dir = env::var_os("OUT_DIR").unwrap(); + let out_dir = Path::new(&out_dir).join("output"); + let mut f = File::create(&out_dir).unwrap(); + + if env::var_os("CARGO_FEATURE_FOO").is_some() { + f.write_all(b"foo").unwrap(); + } else { + f.write_all(b"bar").unwrap(); + } + } + "#) + .build(); + + assert_that(p.cargo("run").arg("-v").arg("--features=foo"), + execs().with_status(0).with_stdout("foo\n")); + assert_that(p.cargo("run").arg("-v"), + execs().with_status(0).with_stdout("bar\n")); + assert_that(p.cargo("run").arg("-v").arg("--features=foo"), + execs().with_status(0).with_stdout("foo\n")); +} + +#[test] +fn assume_build_script_when_build_rs_present() { + let p = project("builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { + if ! 
cfg!(foo) { + panic!("the build script was not run"); + } + } + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#) + .build(); + + assert_that(p.cargo("run").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn if_build_set_to_false_dont_treat_build_rs_as_build_script() { + let p = project("builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = false + "#) + .file("src/main.rs", r#" + fn main() { + if cfg!(foo) { + panic!("the build script was run"); + } + } + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#) + .build(); + + assert_that(p.cargo("run").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn deterministic_rustc_dependency_flags() { + // This bug is non-deterministic hence the large number of dependencies + // in the hopes it will have a much higher chance of triggering it. + + Package::new("dep1", "0.1.0") + .file("Cargo.toml", r#" + [project] + name = "dep1" + version = "0.1.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-L native=test1"); + } + "#) + .file("src/lib.rs", "") + .publish(); + Package::new("dep2", "0.1.0") + .file("Cargo.toml", r#" + [project] + name = "dep2" + version = "0.1.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-L native=test2"); + } + "#) + .file("src/lib.rs", "") + .publish(); + Package::new("dep3", "0.1.0") + .file("Cargo.toml", r#" + [project] + name = "dep3" + version = "0.1.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-L native=test3"); + } + "#) + .file("src/lib.rs", "") + .publish(); + Package::new("dep4", "0.1.0") + .file("Cargo.toml", r#" + [project] + name = "dep4" + version = "0.1.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-L native=test4"); + } + "#) + .file("src/lib.rs", "") + .publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "*" + dep2 = "*" + dep3 = "*" + dep4 = "*" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] `rustc --crate-name foo [..] 
-L native=test1 -L native=test2 \ +-L native=test3 -L native=test4` +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/build.rs b/collector/compile-benchmarks/cargo/tests/build.rs new file mode 100644 index 000000000..2103d5db3 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/build.rs @@ -0,0 +1,3911 @@ +extern crate cargo; +#[macro_use] +extern crate cargotest; +extern crate hamcrest; +extern crate tempdir; + +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargo::util::paths::dylib_path_envvar; +use cargo::util::{process, ProcessBuilder}; +use cargotest::{is_nightly, rustc_host, sleep_ms}; +use cargotest::support::paths::{CargoPathExt,root}; +use cargotest::support::{ProjectBuilder}; +use cargotest::support::{project, execs, main_file, basic_bin_manifest}; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_file, existing_dir, is_not}; +use tempdir::TempDir; + +#[test] +fn cargo_compile_simple() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("i am foo\n")); +} + +#[test] +fn cargo_fail_with_no_stderr() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &String::from("refusal")) + .build(); + assert_that(p.cargo("build").arg("--message-format=json"), execs().with_status(101) + .with_stderr_does_not_contain("--- stderr")); +} + +/// Check that the `CARGO_INCREMENTAL` environment variable results in +/// `rustc` getting `-Zincremental` passed to it. +#[test] +fn cargo_compile_incremental() { + if !is_nightly() { + return + } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"), + execs().with_stderr_contains( + "[RUNNING] `rustc [..] -Zincremental=[..][/]target[/]debug[/]incremental`\n") + .with_status(0)); + + assert_that( + p.cargo("test").arg("-v").env("CARGO_INCREMENTAL", "1"), + execs().with_stderr_contains( + "[RUNNING] `rustc [..] -Zincremental=[..][/]target[/]debug[/]incremental`\n") + .with_status(0)); +} + +#[test] +fn cargo_compile_manifest_path() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("build") + .arg("--manifest-path").arg("foo/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn cargo_compile_with_invalid_manifest() { + let p = project("foo") + .file("Cargo.toml", "") + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + no `package` section found. 
+")) +} + +#[test] +fn cargo_compile_with_invalid_manifest2() { + let p = project("foo") + .file("Cargo.toml", r" + [project] + foo = bar + ") + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + invalid number at line 3 +")) +} + +#[test] +fn cargo_compile_with_invalid_manifest3() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/Cargo.toml", "a = bar") + .build(); + + assert_that(p.cargo("build").arg("--manifest-path") + .arg("src/Cargo.toml"), + execs() + .with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + invalid number at line 1 +")) +} + +#[test] +fn cargo_compile_duplicate_build_targets() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "main" + path = "src/main.rs" + crate-type = ["dylib"] + + [dependencies] + "#) + .file("src/main.rs", r#" + #![allow(warnings)] + fn main() {} + "#) + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(0) + .with_stderr("\ +warning: file found to be present in multiple build targets: [..]main.rs +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +")); +} + +#[test] +fn cargo_compile_with_invalid_version() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + authors = [] + version = "1.0" + "#) + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Expected dot for key `project.version` +")) + +} + +#[test] +fn cargo_compile_with_invalid_package_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "" + authors = [] + version = "0.0.0" + "#) + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + package name cannot be an empty string. 
+")) +} + +#[test] +fn cargo_compile_with_invalid_bin_target_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "" + "#) + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + binary target names cannot be empty +")) +} + +#[test] +fn cargo_compile_with_forbidden_bin_target_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "build" + "#) + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + the binary target name `build` is forbidden +")) +} + +#[test] +fn cargo_compile_with_invalid_lib_target_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [lib] + name = "" + "#) + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + library target names cannot be empty +")) +} + +#[test] +fn cargo_compile_without_manifest() { + let tmpdir = TempDir::new("cargo").unwrap(); + let p = ProjectBuilder::new("foo", tmpdir.path().to_path_buf()).build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory +")); +} + +#[test] +fn cargo_compile_with_invalid_code() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", "invalid rust code!") + .build(); + + assert_that(p.cargo("build"), + execs() + .with_status(101) + .with_stderr_contains("\ +[ERROR] Could not compile `foo`. + +To learn more, run the command again with --verbose.\n")); + assert_that(&p.root().join("Cargo.lock"), existing_file()); +} + +#[test] +fn cargo_compile_with_invalid_code_in_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + [dependencies.baz] + path = "../baz" + "#) + .file("src/main.rs", "invalid rust code!") + .build(); + let _bar = project("bar") + .file("Cargo.toml", &basic_bin_manifest("bar")) + .file("src/lib.rs", "invalid rust code!") + .build(); + let _baz = project("baz") + .file("Cargo.toml", &basic_bin_manifest("baz")) + .file("src/lib.rs", "invalid rust code!") + .build(); + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn cargo_compile_with_warnings_in_the_root_package() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", "fn main() {} fn dead() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr_contains("\ +[..]function is never used: `dead`[..] 
+")); +} + +#[test] +fn cargo_compile_with_warnings_in_a_dep_package() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + + [[bin]] + + name = "foo" + "#) + .file("src/foo.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "bar" + "#) + .file("bar/src/bar.rs", r#" + pub fn gimme() -> &'static str { + "test passed" + } + + fn dead() {} + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr_contains("\ +[..]function is never used: `dead`[..] +")); + + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n")); +} + +#[test] +fn cargo_compile_with_nested_deps_inferred() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = 'bar' + + [[bin]] + name = "foo" + "#) + .file("src/foo.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + "#) + .file("bar/src/lib.rs", r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#) + .file("baz/Cargo.toml", r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("baz/src/lib.rs", r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#) + .build(); + + p.cargo("build") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("libbar.rlib"), is_not(existing_file())); + assert_that(&p.bin("libbaz.rlib"), is_not(existing_file())); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n")); +} + +#[test] +fn cargo_compile_with_nested_deps_correct_bin() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + + [[bin]] + name = "foo" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + "#) + .file("bar/src/lib.rs", r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#) + .file("baz/Cargo.toml", r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("baz/src/lib.rs", r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#) + .build(); + + p.cargo("build") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("libbar.rlib"), is_not(existing_file())); + assert_that(&p.bin("libbaz.rlib"), is_not(existing_file())); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n")); +} + +#[test] +fn cargo_compile_with_nested_deps_shorthand() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + 
.file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + + [lib] + + name = "bar" + "#) + .file("bar/src/bar.rs", r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#) + .file("baz/Cargo.toml", r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "baz" + "#) + .file("baz/src/baz.rs", r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#) + .build(); + + p.cargo("build") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("libbar.rlib"), is_not(existing_file())); + assert_that(&p.bin("libbaz.rlib"), is_not(existing_file())); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n")); +} + +#[test] +fn cargo_compile_with_nested_deps_longhand() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + version = "0.5.0" + + [[bin]] + + name = "foo" + "#) + .file("src/foo.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + version = "0.5.0" + + [lib] + + name = "bar" + "#) + .file("bar/src/bar.rs", r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#) + .file("baz/Cargo.toml", r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "baz" + "#) + .file("baz/src/baz.rs", r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#) + .build(); + + assert_that(p.cargo("build"), execs()); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("libbar.rlib"), is_not(existing_file())); + assert_that(&p.bin("libbaz.rlib"), is_not(existing_file())); + + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n")); +} + +// Check that Cargo gives a sensible error if a dependency can't be found +// because of a name mismatch. 
+#[test] +fn cargo_compile_with_dep_name_mismatch() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["wycats@example.com"] + + [[bin]] + + name = "foo" + + [dependencies.notquitebar] + + path = "bar" + "#) + .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"])) + .file("bar/Cargo.toml", &basic_bin_manifest("bar")) + .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[])) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr(&format!( +r#"[ERROR] no matching package named `notquitebar` found (required by `foo`) +location searched: {proj_dir}/bar +version required: * +"#, proj_dir = p.url()))); +} + +#[test] +fn cargo_compile_with_filename() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", r#" + extern crate foo; + fn main() { println!("hello a.rs"); } + "#) + .file("examples/a.rs", r#" + fn main() { println!("example"); } + "#) + .build(); + + assert_that(p.cargo("build").arg("--bin").arg("bin.rs"), + execs().with_status(101).with_stderr("\ +[ERROR] no bin target named `bin.rs`")); + + assert_that(p.cargo("build").arg("--bin").arg("a.rs"), + execs().with_status(101).with_stderr("\ +[ERROR] no bin target named `a.rs` + +Did you mean `a`?")); + + assert_that(p.cargo("build").arg("--example").arg("example.rs"), + execs().with_status(101).with_stderr("\ +[ERROR] no example target named `example.rs`")); + + assert_that(p.cargo("build").arg("--example").arg("a.rs"), + execs().with_status(101).with_stderr("\ +[ERROR] no example target named `a.rs` + +Did you mean `a`?")); +} + +#[test] +fn compile_path_dep_then_change_version() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#" + [package] + name = "bar" + version = "0.0.2" + authors = [] + "#).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] no matching version `= 0.0.1` found for package `bar` (required by `foo`) +location searched: [..] 
+versions found: 0.0.2 +consider running `cargo update` to update a path dependency's locked version +")); +} + +#[test] +fn ignores_carriage_return_in_lockfile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/main.rs", r#" + mod a; fn main() {} + "#) + .file("src/a.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let lockfile = p.root().join("Cargo.lock"); + let mut lock = String::new(); + File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); + let lock = lock.replace("\n", "\r\n"); + File::create(&lockfile).unwrap().write_all(lock.as_bytes()).unwrap(); + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn cargo_default_env_metadata_env_var() { + // Ensure that path dep + dylib + env_var get metadata + // (even though path_dep + dylib should not) + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", "// hi") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#) + .file("bar/src/lib.rs", "// hello") + .build(); + + // No metadata on libbar since it's a dylib path dependency + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] bar v0.0.1 ({url}/bar) +[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \ + --emit=dep-info,link \ + -C prefer-dynamic -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar{suffix}` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", +dir = p.root().display(), +url = p.url(), +prefix = env::consts::DLL_PREFIX, +suffix = env::consts::DLL_SUFFIX, +))); + + assert_that(p.cargo("clean"), execs().with_status(0)); + + // If you set the env-var, then we expect metadata on libbar + assert_that(p.cargo("build").arg("-v").env("__CARGO_DEFAULT_LIB_METADATA", "stable"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] bar v0.0.1 ({url}/bar) +[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \ + --emit=dep-info,link \ + -C prefer-dynamic -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar-[..]{suffix}` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", +dir = p.root().display(), +url = p.url(), +prefix = env::consts::DLL_PREFIX, +suffix = env::consts::DLL_SUFFIX, +))); +} + +#[test] +fn crate_env_vars() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.1-alpha.1" + description = "This is foo" + homepage = "http://example.com" + authors = ["wycats@example.com"] + "#) + .file("src/main.rs", r#" + extern crate foo; + + + static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR"); + static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR"); + static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH"); + static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE"); + static VERSION: &'static str = env!("CARGO_PKG_VERSION"); + static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR"); + static PKG_NAME: &'static str = env!("CARGO_PKG_NAME"); + static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE"); + static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION"); + + fn main() { + let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR, + VERSION_MINOR, VERSION_PATCH, VERSION_PRE, + CARGO_MANIFEST_DIR); + assert_eq!(s, foo::version()); + println!("{}", s); + assert_eq!("foo", PKG_NAME); + assert_eq!("http://example.com", HOMEPAGE); + assert_eq!("This is foo", DESCRIPTION); + let s = format!("{}.{}.{}-{}", VERSION_MAJOR, + VERSION_MINOR, VERSION_PATCH, VERSION_PRE); + assert_eq!(s, VERSION); + } + "#) + .file("src/lib.rs", r#" + pub fn version() -> String { + format!("{}-{}-{} @ {} in {}", + env!("CARGO_PKG_VERSION_MAJOR"), + env!("CARGO_PKG_VERSION_MINOR"), + env!("CARGO_PKG_VERSION_PATCH"), + env!("CARGO_PKG_VERSION_PRE"), + env!("CARGO_MANIFEST_DIR")) + } + "#) + .build(); + + println!("build"); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + + println!("bin"); + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout(&format!("0-5-1 @ alpha.1 in {}\n", + p.root().display()))); + + println!("test"); + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn crate_authors_env_vars() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.1-alpha.1" + authors = ["wycats@example.com", "neikos@example.com"] + "#) + .file("src/main.rs", r#" + extern crate foo; + + static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS"); + + fn main() { + let s = "wycats@example.com:neikos@example.com"; + assert_eq!(AUTHORS, foo::authors()); + println!("{}", AUTHORS); + assert_eq!(s, AUTHORS); + } + "#) + .file("src/lib.rs", r#" + pub fn authors() -> String { + format!("{}", env!("CARGO_PKG_AUTHORS")) + } + "#) + .build(); + + println!("build"); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + + println!("bin"); + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("wycats@example.com:neikos@example.com")); + + println!("test"); + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); +} + +// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error +fn setenv_for_removing_empty_component(mut p: ProcessBuilder) -> ProcessBuilder { + let v = dylib_path_envvar(); + if let Ok(search_path) = env::var(v) { + let new_search_path = + env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty())) + .expect("join_paths"); + p.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly + } + p +} + +// Regression test for #4277 +#[test] 
+fn crate_library_path_env_var() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", &format!(r##" + fn main() {{ + let search_path = env!("{}"); + let paths = std::env::split_paths(&search_path).collect::<Vec<_>>(); + assert!(!paths.contains(&"".into())); + }} + "##, dylib_path_envvar())) + .build(); + + assert_that(setenv_for_removing_empty_component(p.cargo("run")), + execs().with_status(0)); +} + +// Regression test for #4277 +#[test] +fn build_with_fake_libc_not_loading() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .file("libc.so.6", r#""#) + .build(); + + assert_that(setenv_for_removing_empty_component(p.cargo("build")), + execs().with_status(0)); +} + +// this is testing that src/<pkg-name>.rs still works (for now) +#[test] +fn many_crate_types_old_style_lib_location() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "foo" + crate_type = ["rlib", "dylib"] + "#) + .file("src/foo.rs", r#" + pub fn foo() {} + "#) + .build(); + assert_that(p.cargo("build"), execs().with_status(0).with_stderr_contains("\ +[WARNING] path `[..]src[/]foo.rs` was erroneously implicitly accepted for library `foo`, +please rename the file to `src/lib.rs` or set lib.path in Cargo.toml")); + + assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); + let fname = format!("{}foo{}", env::consts::DLL_PREFIX, + env::consts::DLL_SUFFIX); + assert_that(&p.root().join("target/debug").join(&fname), existing_file()); +} + +#[test] +fn many_crate_types_correct() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "foo" + crate_type = ["rlib", "dylib"] + "#) + .file("src/lib.rs", r#" + pub fn foo() {} + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_status(0)); + + assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); + let fname = format!("{}foo{}", env::consts::DLL_PREFIX, + env::consts::DLL_SUFFIX); + assert_that(&p.root().join("target/debug").join(&fname), existing_file()); +} + +#[test] +fn self_dependency() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [dependencies.test] + + path = "."
+ + [lib] + name = "test" + path = "src/test.rs" + "#) + .file("src/test.rs", "fn main() {}") + .build(); + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[ERROR] cyclic package dependency: package `test v0.0.0 ([..])` depends on itself +")); +} + +#[test] +fn ignore_broken_symlinks() { + // windows and symlinks don't currently agree that well + if cfg!(windows) { return } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .symlink("Notafile", "bar") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("i am foo\n")); +} + +#[test] +fn missing_lib_and_bin() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]Cargo.toml` + +Caused by: + no targets specified in the manifest + either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n")); +} + +#[test] +fn lto_build() { + // FIXME: currently this hits a linker bug on 32-bit MSVC + if cfg!(all(target_env = "msvc", target_pointer_width = "32")) { + return + } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.release] + lto = true + "#) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that(p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]main.rs --crate-type bin \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C lto \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]release[/]deps \ + -L dependency={dir}[/]target[/]release[/]deps` +[FINISHED] release [optimized] target(s) in [..] +", +dir = p.root().display(), +url = p.url(), +))); +} + +#[test] +fn verbose_build() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", +dir = p.root().display(), +url = p.url(), +))); +} + +#[test] +fn verbose_release_build() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]release[/]deps` +[FINISHED] release [optimized] target(s) in [..] 
+", +dir = p.root().display(), +url = p.url(), +))); +} + +#[test] +fn verbose_release_build_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [dependencies.foo] + path = "foo" + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib", "rlib"] + "#) + .file("foo/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({url}/foo) +[RUNNING] `rustc --crate-name foo foo[/]src[/]lib.rs \ + --crate-type dylib --crate-type rlib \ + --emit=dep-info,link \ + -C prefer-dynamic \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]release[/]deps` +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]release[/]deps \ + --extern foo={dir}[/]target[/]release[/]deps[/]{prefix}foo{suffix} \ + --extern foo={dir}[/]target[/]release[/]deps[/]libfoo.rlib` +[FINISHED] release [optimized] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX))); +} + +#[test] +fn explicit_examples() { + let p = project("world") + .file("Cargo.toml", r#" + [package] + name = "world" + version = "1.0.0" + authors = [] + + [lib] + name = "world" + path = "src/lib.rs" + + [[example]] + name = "hello" + path = "examples/ex-hello.rs" + + [[example]] + name = "goodbye" + path = "examples/ex-goodbye.rs" + "#) + .file("src/lib.rs", r#" + pub fn get_hello() -> &'static str { "Hello" } + pub fn get_goodbye() -> &'static str { "Goodbye" } + pub fn get_world() -> &'static str { "World" } + "#) + .file("examples/ex-hello.rs", r#" + extern crate world; + fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); } + "#) + .file("examples/ex-goodbye.rs", r#" + extern crate world; + fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); } + "#) + .build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); + assert_that(process(&p.bin("examples/hello")), + execs().with_status(0).with_stdout("Hello, World!\n")); + assert_that(process(&p.bin("examples/goodbye")), + execs().with_status(0).with_stdout("Goodbye, World!\n")); +} + +#[test] +fn non_existing_example() { + let p = project("world") + .file("Cargo.toml", r#" + [package] + name = "world" + version = "1.0.0" + authors = [] + + [lib] + name = "world" + path = "src/lib.rs" + + [[example]] + name = "hello" + "#) + .file("src/lib.rs", "") + .file("examples/ehlo.rs", "") + .build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + can't find `hello` example, specify example.path")); +} + +#[test] +fn non_existing_binary() { + let p = project("world") + .file("Cargo.toml", r#" + [package] + name = "world" + version = "1.0.0" + authors = [] + + [[bin]] + name = "hello" + "#) + .file("src/lib.rs", "") + .file("src/bin/ehlo.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + can't find `hello` bin, specify bin.path")); +} + +#[test] +fn legacy_binary_paths_warinigs() 
{ + let p = project("world") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "1.0.0" + authors = [] + + [[bin]] + name = "bar" + "#) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\ +[WARNING] path `[..]src[/]main.rs` was erroneously implicitly accepted for binary `bar`, +please set bin.path in Cargo.toml")); + + let p = project("world") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "1.0.0" + authors = [] + + [[bin]] + name = "bar" + "#) + .file("src/lib.rs", "") + .file("src/bin/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\ +[WARNING] path `[..]src[/]bin[/]main.rs` was erroneously implicitly accepted for binary `bar`, +please set bin.path in Cargo.toml")); + + let p = project("world") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "1.0.0" + authors = [] + + [[bin]] + name = "bar" + "#) + .file("src/bar.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\ +[WARNING] path `[..]src[/]bar.rs` was erroneously implicitly accepted for binary `bar`, +please set bin.path in Cargo.toml")); +} + +#[test] +fn implicit_examples() { + let p = project("world") + .file("Cargo.toml", r#" + [package] + name = "world" + version = "1.0.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn get_hello() -> &'static str { "Hello" } + pub fn get_goodbye() -> &'static str { "Goodbye" } + pub fn get_world() -> &'static str { "World" } + "#) + .file("examples/hello.rs", r#" + extern crate world; + fn main() { + println!("{}, {}!", world::get_hello(), world::get_world()); + } + "#) + .file("examples/goodbye.rs", r#" + extern crate world; + fn main() { + println!("{}, {}!", world::get_goodbye(), world::get_world()); + } + "#) + .build(); + + assert_that(p.cargo("test"), execs().with_status(0)); + assert_that(process(&p.bin("examples/hello")), + execs().with_status(0).with_stdout("Hello, World!\n")); + assert_that(process(&p.bin("examples/goodbye")), + execs().with_status(0).with_stdout("Goodbye, World!\n")); +} + +#[test] +fn standard_build_no_ndebug() { + let p = project("world") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", r#" + fn main() { + if cfg!(debug_assertions) { + println!("slow") + } else { + println!("fast") + } + } + "#) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("slow\n")); +} + +#[test] +fn release_build_ndebug() { + let p = project("world") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", r#" + fn main() { + if cfg!(debug_assertions) { + println!("slow") + } else { + println!("fast") + } + } + "#) + .build(); + + assert_that(p.cargo("build").arg("--release"), + execs().with_status(0)); + assert_that(process(&p.release_bin("foo")), + execs().with_status(0).with_stdout("fast\n")); +} + +#[test] +fn inferred_main_bin() { + let p = project("world") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(process(&p.bin("foo")), execs().with_status(0)); +} + +#[test] +fn deletion_causes_failure() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + 
name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + p.change_file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#); + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn bad_cargo_toml_in_target_dir() { + let p = project("world") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("target/Cargo.toml", "bad-toml") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(process(&p.bin("foo")), execs().with_status(0)); +} + +#[test] +fn lib_with_standard_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + pub fn foo() {} + ") + .file("src/main.rs", " + extern crate syntax; + fn main() { syntax::foo() } + ") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url()))); +} + +#[test] +fn simple_staticlib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + crate-type = ["staticlib"] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + // env var is a test for #1381 + assert_that(p.cargo("build").env("RUST_LOG", "nekoneko=trace"), + execs().with_status(0)); +} + +#[test] +fn staticlib_rlib_and_bin() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + crate-type = ["staticlib", "rlib"] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .file("src/main.rs", r#" + extern crate foo; + + fn main() { + foo::foo(); + }"#) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn opt_out_of_bin() { + let p = project("foo") + .file("Cargo.toml", r#" + bin = [] + + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file("src/main.rs", "bad syntax") + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn single_lib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + path = "src/bar.rs" + "#) + .file("src/bar.rs", "") + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn freshness_ignores_excluded() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + exclude = ["src/b*.rs"] + "#) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .build(); + foo.root().move_into_the_past(); + + assert_that(foo.cargo("build"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", url = foo.url()))); + + // Smoke test to make sure it doesn't compile again + println!("first pass"); + assert_that(foo.cargo("build"), + execs().with_status(0) + .with_stdout("")); + + // Modify an ignored file and make sure we don't rebuild + println!("second pass"); + File::create(&foo.root().join("src/bar.rs")).unwrap(); + assert_that(foo.cargo("build"), + execs().with_status(0) + .with_stdout("")); +} + +#[test] +fn rebuild_preserves_out_dir() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#) + .file("build.rs", r#" + use std::env; + use std::fs::File; + use std::path::Path; + + fn main() { + let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo"); + if env::var_os("FIRST").is_some() { + File::create(&path).unwrap(); + } else { + File::create(&path).unwrap(); + } + } + "#) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .build(); + foo.root().move_into_the_past(); + + assert_that(foo.cargo("build").env("FIRST", "1"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", url = foo.url()))); + + File::create(&foo.root().join("src/bar.rs")).unwrap(); + assert_that(foo.cargo("build"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", url = foo.url()))); +} + +#[test] +fn dep_no_libs() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.0" + authors = [] + "#) + .file("bar/src/main.rs", "") + .build(); + assert_that(foo.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn recompile_space_in_name() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + name = "foo" + path = "src/my lib.rs" + "#) + .file("src/my lib.rs", "") + .build(); + assert_that(foo.cargo("build"), execs().with_status(0)); + foo.root().move_into_the_past(); + assert_that(foo.cargo("build"), + execs().with_status(0).with_stdout("")); +} + +#[cfg(unix)] +#[test] +fn ignore_bad_directories() { + use std::os::unix::prelude::*; + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + let dir = foo.root().join("tmp"); + fs::create_dir(&dir).unwrap(); + let stat = fs::metadata(&dir).unwrap(); + let mut perms = stat.permissions(); + perms.set_mode(0o644); + fs::set_permissions(&dir, perms.clone()).unwrap(); + assert_that(foo.cargo("build"), + execs().with_status(0)); + perms.set_mode(0o755); + fs::set_permissions(&dir, perms).unwrap(); +} + +#[test] +fn bad_cargo_config() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + this is not valid toml + "#) + .build(); + assert_that(foo.cargo("build").arg("-v"), + execs().with_status(101).with_stderr("\ +[ERROR] Couldn't load Cargo configuration + +Caused by: + could not parse TOML configuration in `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected an equals, found an identifier at line 2 +")); +} + +#[test] +fn 
cargo_platform_specific_dependency() { + let host = rustc_host(); + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [target.{host}.dependencies] + dep = {{ path = "dep" }} + [target.{host}.build-dependencies] + build = {{ path = "build" }} + [target.{host}.dev-dependencies] + dev = {{ path = "dev" }} + "#, host = host)) + .file("src/main.rs", r#" + extern crate dep; + fn main() { dep::dep() } + "#) + .file("tests/foo.rs", r#" + extern crate dev; + #[test] + fn foo() { dev::dev() } + "#) + .file("build.rs", r#" + extern crate build; + fn main() { build::build(); } + "#) + .file("dep/Cargo.toml", r#" + [project] + name = "dep" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("dep/src/lib.rs", "pub fn dep() {}") + .file("build/Cargo.toml", r#" + [project] + name = "build" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("build/src/lib.rs", "pub fn build() {}") + .file("dev/Cargo.toml", r#" + [project] + name = "dev" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("dev/src/lib.rs", "pub fn dev() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(p.cargo("test"), + execs().with_status(0)); +} + +#[test] +fn bad_platform_specific_dependency() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [target.wrong-target.dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("bar/src/lib.rs", r#" + extern crate baz; + + pub fn gimme() -> String { + format!("") + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101)); +} + +#[test] +fn cargo_platform_specific_dependency_wrong_platform() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [target.non-existing-triplet.dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("bar/src/lib.rs", r#" + invalid rust file, should not be compiled + "#) + .build(); + + p.cargo("build").exec_with_output().unwrap(); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(process(&p.bin("foo")), + execs().with_status(0)); + + let loc = p.root().join("Cargo.lock"); + let mut lockfile = String::new(); + File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap(); + assert!(lockfile.contains("bar")) +} + +#[test] +fn example_as_lib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["lib"] + "#) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that(&p.example_lib("ex", "lib"), existing_file()); +} + +#[test] +fn example_as_rlib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["rlib"] + "#) + .file("src/lib.rs", "") + 
.file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that(&p.example_lib("ex", "rlib"), existing_file()); +} + +#[test] +fn example_as_dylib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["dylib"] + "#) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that(&p.example_lib("ex", "dylib"), existing_file()); +} + +#[test] +fn example_as_proc_macro() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["proc-macro"] + "#) + .file("src/lib.rs", "") + .file("examples/ex.rs", "#![feature(proc_macro)]") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that(&p.example_lib("ex", "proc-macro"), existing_file()); +} + +#[test] +fn example_bin_same_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .build(); + + p.cargo("test").arg("--no-run").arg("-v") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("foo"), is_not(existing_file())); + // We expect a file of the form bin/foo-{metadata_hash} + assert_that(&p.bin("examples/foo"), existing_file()); + + p.cargo("test").arg("--no-run").arg("-v") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("foo"), is_not(existing_file())); + // We expect a file of the form bin/foo-{metadata_hash} + assert_that(&p.bin("examples/foo"), existing_file()); +} + +#[test] +fn compile_then_delete() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("run").arg("-v"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + if cfg!(windows) { + // On windows unlinking immediately after running often fails, so sleep + sleep_ms(100); + } + fs::remove_file(&p.bin("foo")).unwrap(); + assert_that(p.cargo("run").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn transitive_dependencies_not_available() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.aaaaa] + path = "a" + "#) + .file("src/main.rs", "extern crate bbbbb; extern crate aaaaa; fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "aaaaa" + version = "0.0.1" + authors = [] + + [dependencies.bbbbb] + path = "../b" + "#) + .file("a/src/lib.rs", "extern crate bbbbb;") + .file("b/Cargo.toml", r#" + [package] + name = "bbbbb" + version = "0.0.1" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101) + .with_stderr_contains("\ +[..] can't find crate for `bbbbb`[..] +")); +} + +#[test] +fn cyclic_deps_rejected() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = ".." 
+ "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101) + .with_stderr("\ +[ERROR] cyclic package dependency: package `a v0.0.1 ([..])` depends on itself +")); +} + +#[test] +fn predictable_filenames() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["dylib", "rlib"] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); + let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, + env::consts::DLL_SUFFIX); + assert_that(&p.root().join("target/debug").join(dylib_name), + existing_file()); +} + +#[test] +fn dashes_to_underscores() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo-bar" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/main.rs", "extern crate foo_bar; fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + assert_that(&p.bin("foo-bar"), existing_file()); +} + +#[test] +fn dashes_in_crate_name_bad() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo-bar" + "#) + .file("src/lib.rs", "") + .file("src/main.rs", "extern crate foo_bar; fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101)); +} + +#[test] +fn rustc_env_var() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build") + .env("RUSTC", "rustc-that-does-not-exist").arg("-v"), + execs().with_status(101) + .with_stderr("\ +[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..]) + +Caused by: +[..] 
+")); + assert_that(&p.bin("a"), is_not(existing_file())); +} + +#[test] +fn filtering() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--lib"), + execs().with_status(0)); + assert_that(&p.bin("a"), is_not(existing_file())); + + assert_that(p.cargo("build").arg("--bin=a").arg("--example=a"), + execs().with_status(0)); + assert_that(&p.bin("a"), existing_file()); + assert_that(&p.bin("b"), is_not(existing_file())); + assert_that(&p.bin("examples/a"), existing_file()); + assert_that(&p.bin("examples/b"), is_not(existing_file())); +} + +#[test] +fn filtering_implicit_bins() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--bins"), + execs().with_status(0)); + assert_that(&p.bin("a"), existing_file()); + assert_that(&p.bin("b"), existing_file()); + assert_that(&p.bin("examples/a"), is_not(existing_file())); + assert_that(&p.bin("examples/b"), is_not(existing_file())); +} + +#[test] +fn filtering_implicit_examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--examples"), + execs().with_status(0)); + assert_that(&p.bin("a"), is_not(existing_file())); + assert_that(&p.bin("b"), is_not(existing_file())); + assert_that(&p.bin("examples/a"), existing_file()); + assert_that(&p.bin("examples/b"), existing_file()); +} + +#[test] +fn ignore_dotfile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/.a.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn ignore_dotdirs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/a.rs", "fn main() {}") + .file(".git/Cargo.toml", "") + .file(".pc/dummy-fix.patch/Cargo.toml", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn dotdir_root() { + let p = ProjectBuilder::new("foo", root().join(".foo")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/a.rs", "fn main() {}") + .build(); + assert_that(p.cargo("build"), + execs().with_status(0)); +} + + +#[test] +fn custom_target_dir() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); + + assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"), + execs().with_status(0)); + assert_that(&p.root().join("foo/target/debug").join(&exe_name), + 
existing_file()); + assert_that(&p.root().join("target/debug").join(&exe_name), + is_not(existing_file())); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(&p.root().join("foo/target/debug").join(&exe_name), + existing_file()); + assert_that(&p.root().join("target/debug").join(&exe_name), + existing_file()); + + fs::create_dir(p.root().join(".cargo")).unwrap(); + File::create(p.root().join(".cargo/config")).unwrap().write_all(br#" + [build] + target-dir = "foo/target" + "#).unwrap(); + assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "bar/target"), + execs().with_status(0)); + assert_that(&p.root().join("bar/target/debug").join(&exe_name), + existing_file()); + assert_that(&p.root().join("foo/target/debug").join(&exe_name), + existing_file()); + assert_that(&p.root().join("target/debug").join(&exe_name), + existing_file()); +} + +#[test] +fn rustc_no_trans() { + if !is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"), + execs().with_status(0)); +} + +#[test] +fn build_multiple_packages() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [[bin]] + name = "foo" + "#) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d1" + "#) + .file("d1/src/lib.rs", "") + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d2" + doctest = false + "#) + .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") + .build(); + + assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("d2") + .arg("-p").arg("foo"), + execs().with_status(0)); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("i am foo\n")); + + let d1_path = &p.build_dir().join("debug") + .join(format!("d1{}", env::consts::EXE_SUFFIX)); + let d2_path = &p.build_dir().join("debug") + .join(format!("d2{}", env::consts::EXE_SUFFIX)); + + assert_that(d1_path, existing_file()); + assert_that(process(d1_path), execs().with_status(0).with_stdout("d1")); + + assert_that(d2_path, existing_file()); + assert_that(process(d2_path), + execs().with_status(0).with_stdout("d2")); +} + +#[test] +fn invalid_spec() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + + [[bin]] + name = "foo" + "#) + .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d1" + "#) + .file("d1/src/lib.rs", "") + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .build(); + + assert_that(p.cargo("build").arg("-p").arg("notAValidDep"), + execs().with_status(101).with_stderr("\ +[ERROR] package id specification `notAValidDep` matched no packages +")); + + assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("notAValidDep"), + execs().with_status(101).with_stderr("\ +[ERROR] package id specification `notAValidDep` matched no packages +")); +} + +#[test] +fn 
manifest_with_bom_is_ok() { + let p = project("foo") + .file("Cargo.toml", "\u{FEFF} + [package] + name = \"foo\" + version = \"0.0.1\" + authors = [] + ") + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn panic_abort_compiles_with_panic_abort() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr_contains("[..] -C panic=abort [..]")); +} + +#[test] +fn explicit_color_config_is_propagated_to_rustc() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v").arg("--color").arg("always"), + execs().with_status(0).with_stderr_contains( + "[..]rustc [..] src[/]lib.rs --color always[..]")); + + assert_that(p.cargo("clean"), execs().with_status(0)); + + assert_that(p.cargo("build").arg("-v").arg("--color").arg("never"), + execs().with_status(0).with_stderr("\ +[COMPILING] test v0.0.0 ([..]) +[RUNNING] `rustc [..] --color never [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn compiler_json_error_format() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [profile.dev] + debug = false # prevent the *.dSYM from affecting the test result + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", "fn main() { let unused = 92; }") + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("bar/src/lib.rs", r#"fn dead() {}"#) + .build(); + + assert_that(p.cargo("build").arg("-v") + .arg("--message-format").arg("json"), + execs().with_status(0).with_json(r#" + { + "reason":"compiler-message", + "package_id":"bar 0.5.0 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "name":"bar", + "src_path":"[..]lib.rs" + }, + "message":"{...}" + } + + { + "reason":"compiler-artifact", + "profile": { + "debug_assertions": true, + "debuginfo": null, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "features": [], + "package_id":"bar 0.5.0 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "name":"bar", + "src_path":"[..]lib.rs" + }, + "filenames":["[..].rlib"], + "fresh": false + } + + { + "reason":"compiler-message", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..]main.rs" + }, + "message":"{...}" + } + + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..]main.rs" + }, + "profile": { + "debug_assertions": true, + "debuginfo": null, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "features": [], + "filenames": ["[..]"], + "fresh": false + } +"#)); + + // With fresh build, we should repeat the artifacts, + // but omit compiler warnings. 
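+    // (rustc is not re-invoked for fresh targets, so there are no new
+    // diagnostics to forward; note the "fresh": true fields below.)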
+ assert_that(p.cargo("build").arg("-v") + .arg("--message-format").arg("json"), + execs().with_status(0).with_json(r#" + { + "reason":"compiler-artifact", + "profile": { + "debug_assertions": true, + "debuginfo": null, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "features": [], + "package_id":"bar 0.5.0 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "name":"bar", + "src_path":"[..]lib.rs" + }, + "filenames":["[..].rlib"], + "fresh": true + } + + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..]main.rs" + }, + "profile": { + "debug_assertions": true, + "debuginfo": null, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "features": [], + "filenames": ["[..]"], + "fresh": true + } +"#)); +} + +#[test] +fn wrong_message_format_option() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--message-format").arg("XML"), + execs().with_status(1) + .with_stderr_contains( +r#"[ERROR] Could not match 'xml' with any of the allowed variants: ["Human", "Json"]"#)); +} + +#[test] +fn message_format_json_forward_stderr() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() { let unused = 0; }") + .build(); + + assert_that(p.cargo("rustc").arg("--release").arg("--bin").arg("foo") + .arg("--message-format").arg("JSON"), + execs().with_status(0) + .with_json(r#" + { + "reason":"compiler-message", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..]" + }, + "message":"{...}" + } + + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..]" + }, + "profile":{ + "debug_assertions":false, + "debuginfo":null, + "opt_level":"3", + "overflow_checks": false, + "test":false + }, + "features":[], + "filenames":["[..]"], + "fresh": false + } +"#)); +} + +#[test] +fn no_warn_about_package_metadata() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [package.metadata] + foo = "bar" + a = true + b = 3 + + [package.metadata.another] + bar = 3 + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("[..] foo v0.0.1 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n")); +} + +#[test] +fn cargo_build_empty_target() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--target").arg(""), + execs().with_status(101) + .with_stderr_contains("[..] target was empty")); +} + +#[test] +fn build_all_workspace() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + assert_that(p.cargo("build") + .arg("--all"), + execs().with_status(0) + .with_stderr("[..] Compiling bar v0.1.0 ([..])\n\ + [..] Compiling foo v0.1.0 ([..])\n\ + [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n")); +} + +#[test] +fn build_all_exclude() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.1.0" + "#) + .file("baz/src/lib.rs", r#" + pub fn baz() { + break_the_build(); + } + "#) + .build(); + + assert_that(p.cargo("build") + .arg("--all") + .arg("--exclude") + .arg("baz"), + execs().with_status(0) + .with_stderr_contains("[..]Compiling foo v0.1.0 [..]") + .with_stderr_contains("[..]Compiling bar v0.1.0 [..]") + .with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]")); +} + +#[test] +fn build_all_workspace_implicit_examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/c.rs", "fn main() {}") + .file("examples/d.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", "") + .file("bar/src/bin/e.rs", "fn main() {}") + .file("bar/src/bin/f.rs", "fn main() {}") + .file("bar/examples/g.rs", "fn main() {}") + .file("bar/examples/h.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build") + .arg("--all").arg("--examples"), + execs().with_status(0) + .with_stderr("[..] Compiling bar v0.1.0 ([..])\n\ + [..] Compiling foo v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n")); + assert_that(&p.bin("a"), is_not(existing_file())); + assert_that(&p.bin("b"), is_not(existing_file())); + assert_that(&p.bin("examples/c"), existing_file()); + assert_that(&p.bin("examples/d"), existing_file()); + assert_that(&p.bin("e"), is_not(existing_file())); + assert_that(&p.bin("f"), is_not(existing_file())); + assert_that(&p.bin("examples/g"), existing_file()); + assert_that(&p.bin("examples/h"), existing_file()); +} + +#[test] +fn build_all_virtual_manifest() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + // The order in which foo and bar are built is not guaranteed + assert_that(p.cargo("build") + .arg("--all"), + execs().with_status(0) + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])") + .with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Compiling [..] v0.1.0 ([..])\n\ + [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n")); +} + +#[test] +fn build_virtual_manifest_all_implied() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + // The order in which foo and bar are built is not guaranteed + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])") + .with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Compiling [..] v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n")); +} + +#[test] +fn build_virtual_manifest_one_project() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + assert_that(p.cargo("build") + .arg("-p").arg("foo"), + execs().with_status(0) + .with_stderr_does_not_contain("bar") + .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])") + .with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n")); +} + +#[test] +fn build_all_virtual_manifest_implicit_examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", "") + .file("foo/src/bin/a.rs", "fn main() {}") + .file("foo/src/bin/b.rs", "fn main() {}") + .file("foo/examples/c.rs", "fn main() {}") + .file("foo/examples/d.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", "") + .file("bar/src/bin/e.rs", "fn main() {}") + .file("bar/src/bin/f.rs", "fn main() {}") + .file("bar/examples/g.rs", "fn main() {}") + .file("bar/examples/h.rs", "fn main() {}") + .build(); + + // The order in which foo and bar are built is not guaranteed + assert_that(p.cargo("build") + .arg("--all").arg("--examples"), + execs().with_status(0) + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])") + .with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Compiling [..] v0.1.0 ([..])\n\ + [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n")); + assert_that(&p.bin("a"), is_not(existing_file())); + assert_that(&p.bin("b"), is_not(existing_file())); + assert_that(&p.bin("examples/c"), existing_file()); + assert_that(&p.bin("examples/d"), existing_file()); + assert_that(&p.bin("e"), is_not(existing_file())); + assert_that(&p.bin("f"), is_not(existing_file())); + assert_that(&p.bin("examples/g"), existing_file()); + assert_that(&p.bin("examples/h"), existing_file()); +} + +#[test] +fn build_all_member_dependency_same_name() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["a"] + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + a = "0.1.0" + "#) + .file("a/src/lib.rs", r#" + pub fn a() {} + "#) + .build(); + + Package::new("a", "0.1.0").publish(); + + assert_that(p.cargo("build") + .arg("--all"), + execs().with_status(0) + .with_stderr("[..] Updating registry `[..]`\n\ + [..] Downloading a v0.1.0 ([..])\n\ + [..] Compiling a v0.1.0\n\ + [..] Compiling a v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n")); +} + +#[test] +fn run_proper_binary() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "main" + [[bin]] + name = "other" + "#) + .file("src/lib.rs", "") + .file("src/bin/main.rs", r#" + fn main() { + panic!("This should never be run."); + } + "#) + .file("src/bin/other.rs", r#" + fn main() { + } + "#) + .build(); + + assert_that(p.cargo("run").arg("--bin").arg("other"), + execs().with_status(0)); +} + +#[test] +fn run_proper_binary_main_rs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + "#) + .file("src/lib.rs", "") + .file("src/bin/main.rs", r#" + fn main() { + } + "#) + .build(); + + assert_that(p.cargo("run").arg("--bin").arg("foo"), + execs().with_status(0)); +} + +#[test] +fn run_proper_alias_binary_from_src() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + [[bin]] + name = "bar" + "#) + .file("src/foo.rs", r#" + fn main() { + println!("foo"); + } + "#).file("src/bar.rs", r#" + fn main() { + println!("bar"); + } + "#) + .build(); + + assert_that(p.cargo("build") + .arg("--all"), + execs().with_status(0) + ); + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("foo\n")); + assert_that(process(&p.bin("bar")), + execs().with_status(0).with_stdout("bar\n")); +} + +#[test] +fn run_proper_alias_binary_main_rs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + [[bin]] + name = "bar" + "#) + .file("src/main.rs", r#" + fn main() { + println!("main"); + } + "#) + .build(); + + assert_that(p.cargo("build") + .arg("--all"), + execs().with_status(0) + ); + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("main\n")); + assert_that(process(&p.bin("bar")), + execs().with_status(0).with_stdout("main\n")); +} + +#[test] +fn run_proper_binary_main_rs_as_foo() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + "#) + .file("src/foo.rs", r#" + fn main() { + panic!("This should never be run."); + } + "#) + .file("src/main.rs", r#" + fn main() { + } + "#) + .build(); + + 
assert_that(p.cargo("run").arg("--bin").arg("foo"), + execs().with_status(0)); +} + +#[test] +fn rustc_wrapper() { + // We don't have /usr/bin/env on Windows. + if cfg!(windows) { return } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("build").arg("-v").env("RUSTC_WRAPPER", "/usr/bin/env"), + execs().with_stderr_contains( + "[RUNNING] `/usr/bin/env rustc --crate-name foo [..]") + .with_status(0)); +} + +#[test] +fn cdylib_not_lifted() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [lib] + crate-type = ["cdylib"] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let files = if cfg!(windows) { + vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"] + } else if cfg!(target_os = "macos") { + vec!["libfoo.dylib"] + } else { + vec!["libfoo.so"] + }; + + for file in files { + println!("checking: {}", file); + assert_that(&p.root().join("target/debug/deps").join(&file), + existing_file()); + } +} + +#[test] +fn cdylib_final_outputs() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo-bar" + authors = [] + version = "0.1.0" + + [lib] + crate-type = ["cdylib"] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let files = if cfg!(windows) { + vec!["foo_bar.dll.lib", "foo_bar.dll"] + } else if cfg!(target_os = "macos") { + vec!["libfoo_bar.dylib"] + } else { + vec!["libfoo_bar.so"] + }; + + for file in files { + println!("checking: {}", file); + assert_that(&p.root().join("target/debug").join(&file), existing_file()); + } +} + +#[test] +fn wasm32_final_outputs() { + use cargo::core::{Shell, Target, Workspace}; + use cargo::ops::{self, BuildConfig, Context, CompileMode, CompileOptions, Kind, Unit}; + use cargo::util::Config; + use cargo::util::important_paths::find_root_manifest_for_wd; + + let target_triple = "wasm32-unknown-emscripten"; + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo-bar" + authors = [] + version = "0.1.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + // We can't cross-compile the project to wasm target unless we have emscripten installed. + // So here we will not run `cargo build`, but just create cargo_rustc::Context and ask it + // what the target file names would be. + + // Create various stuff required to build cargo_rustc::Context. + let shell = Shell::new(); + let config = Config::new(shell, p.root(), p.root()); + let root = find_root_manifest_for_wd(None, config.cwd()).expect("Can't find the root manifest"); + let ws = Workspace::new(&root, &config).expect("Can't create workspace"); + + let opts = CompileOptions { + target: Some(target_triple), + .. CompileOptions::default(&config, CompileMode::Build) + }; + + let specs = opts.spec.into_package_id_specs(&ws).expect("Can't create specs"); + + let (packages, resolve) = ops::resolve_ws_precisely( + &ws, + None, + opts.features, + opts.all_features, + opts.no_default_features, + &specs, + ).expect("Can't create resolve"); + + let build_config = BuildConfig { + requested_target: Some(target_triple.to_string()), + jobs: 1, + .. 
BuildConfig::default()
+    };
+
+    let pkgid = packages
+        .package_ids()
+        .filter(|id| id.name() == "foo-bar")
+        .collect::<Vec<_>>();
+    let pkg = packages.get(pkgid[0]).expect("Can't get package");
+
+    let target = Target::bin_target("foo-bar", p.root().join("src/main.rs"), None);
+
+    let unit = Unit {
+        pkg: &pkg,
+        target: &target,
+        profile: &ws.profiles().dev,
+        kind: Kind::Target,
+    };
+    let units = vec![unit];
+
+    // Finally, create the cargo_rustc::Context.
+    let mut ctx = Context::new(
+        &ws,
+        &resolve,
+        &packages,
+        &config,
+        build_config,
+        ws.profiles(),
+    ).expect("Can't create context");
+
+    // Ask the context to resolve target file names.
+    ctx.probe_target_info(&units).expect("Can't probe target info");
+    let target_filenames = ctx.target_filenames(&unit).expect("Can't get target file names");
+
+    // Verify the result.
+    let mut expected = vec!["debug/foo-bar.js", "debug/foo_bar.wasm"];
+
+    assert_eq!(target_filenames.len(), expected.len());
+
+    let mut target_filenames = target_filenames
+        .iter()
+        .map(|&(_, ref link_dst, _)| link_dst.clone().unwrap())
+        .collect::<Vec<_>>();
+    target_filenames.sort();
+    expected.sort();
+
+    for (expected, actual) in expected.iter().zip(target_filenames.iter()) {
+        assert!(
+            actual.ends_with(expected),
+            format!("{:?} does not end with {}", actual, expected)
+        );
+    }
+}
+
+#[test]
+fn deterministic_cfg_flags() {
+    // This bug is non-deterministic
+
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            build = "build.rs"
+
+            [features]
+            default = ["f_a", "f_b", "f_c", "f_d"]
+            f_a = []
+            f_b = []
+            f_c = []
+            f_d = []
+        "#)
+        .file("build.rs", r#"
+            fn main() {
+                println!("cargo:rustc-cfg=cfg_a");
+                println!("cargo:rustc-cfg=cfg_b");
+                println!("cargo:rustc-cfg=cfg_c");
+                println!("cargo:rustc-cfg=cfg_d");
+                println!("cargo:rustc-cfg=cfg_e");
+            }
+        "#)
+        .file("src/main.rs", r#"
+            fn main() {}
+        "#)
+        .build();
+
+    assert_that(p.cargo("build").arg("-v"),
+                execs().with_status(0)
+                       .with_stderr("\
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] [..]
+[RUNNING] [..]
+[RUNNING] `rustc --crate-name foo [..] \
+--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\
+--cfg[..]f_c[..]--cfg[..]f_d[..] 
\ +--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")); +} + +#[test] +fn explicit_bins_without_paths() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "foo" + + [[bin]] + name = "bar" + "#) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn no_bin_in_src_with_lib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "foo" + "#) + .file("src/lib.rs", "") + .file("src/foo.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr_contains("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + can't find `foo` bin, specify bin.path")); +} + + +#[test] +fn dirs_in_bin_dir_with_main_rs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .file("src/bin/bar2.rs", "fn main() {}") + .file("src/bin/bar3/main.rs", "fn main() {}") + .file("src/bin/bar4/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.bin("bar2"), existing_file()); + assert_that(&p.bin("bar3"), existing_file()); + assert_that(&p.bin("bar4"), existing_file()); +} + +#[test] +fn dir_and_file_with_same_name_in_bin() { + // this should fail, because we have two binaries with the same name + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .file("src/bin/foo.rs", "fn main() {}") + .file("src/bin/foo/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr_contains("\ +[..]found duplicate binary name foo, but all binary targets must have a unique name[..] +")); +} + +#[test] +fn inferred_path_in_src_bin_foo() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "bar" + # Note, no `path` key! 
+ "#) + .file("src/bin/bar/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("bar"), existing_file()); +} + +#[test] +fn inferred_examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "fn main() {}") + .file("examples/bar.rs", "fn main() {}") + .file("examples/baz/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("test"), execs().with_status(0)); + assert_that(&p.bin("examples/bar"), existing_file()); + assert_that(&p.bin("examples/baz"), existing_file()); +} + +#[test] +fn inferred_tests() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "fn main() {}") + .file("tests/bar.rs", "fn main() {}") + .file("tests/baz/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("test").arg("--test=bar").arg("--test=baz"), + execs().with_status(0)); +} + +#[test] +fn inferred_benchmark() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "fn main() {}") + .file("benches/bar.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("bench").arg("--bench=bar"), + execs().with_status(0)); +} + +#[test] +fn inferred_benchmark_from_directory() { + //FIXME: merge with `inferred_benchmark` after fixing #4504 + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "fn main() {}") + .file("benches/bar/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("bench").arg("--bench=bar"), + execs().with_status(0)); +} + +#[test] +fn same_metadata_different_directory() { + // A top-level crate built in two different workspaces should have the + // same metadata hash. 
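+    // (that is, the -C metadata value passed to rustc must not depend on the
+    // absolute path of the directory the crate is built in.)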
+ let p = project("foo1") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + let output = t!(String::from_utf8( + t!(p.cargo("build").arg("-v").exec_with_output()) + .stderr, + )); + let metadata = output + .split_whitespace() + .find(|arg| arg.starts_with("metadata=")) + .unwrap(); + + let p = project("foo2") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr_contains( + format!("[..]{}[..]", metadata), + ), + ); +} + +#[test] +fn building_a_dependent_crate_witout_bin_should_fail() { + Package::new("testless", "0.1.0") + .file("Cargo.toml", r#" + [project] + name = "testless" + version = "0.1.0" + + [[bin]] + name = "a_bin" + "#) + .file("src/lib.rs", "") + .publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + testless = "0.1.0" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "[..]can't find `a_bin` bin, specify bin.path" + )); +} + +#[cfg(any(target_os = "macos", target_os = "ios"))] +#[test] +fn uplift_dsym_of_bin_on_mac() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("src/main.rs", "fn main() { panic!(); }") + .file("src/bin/b.rs", "fn main() { panic!(); }") + .file("examples/c.rs", "fn main() { panic!(); }") + .file("tests/d.rs", "fn main() { panic!(); }") + .build(); + + assert_that( + p.cargo("build").arg("--bins").arg("--examples").arg("--tests"), + execs().with_status(0) + ); + assert_that(&p.bin("foo.dSYM"), existing_dir()); + assert_that(&p.bin("b.dSYM"), existing_dir()); + assert!( + p.bin("b.dSYM") + .symlink_metadata() + .expect("read metadata from b.dSYM") + .file_type() + .is_symlink() + ); + assert_that(&p.bin("c.dSYM"), is_not(existing_dir())); + assert_that(&p.bin("d.dSYM"), is_not(existing_dir())); +} diff --git a/collector/compile-benchmarks/cargo/tests/cargo-features.rs b/collector/compile-benchmarks/cargo/tests/cargo-features.rs new file mode 100644 index 000000000..0434b776e --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/cargo-features.rs @@ -0,0 +1,273 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::ChannelChanger; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn feature_required() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build") + .masquerade_as_nightly_cargo(), + execs().with_status(101) + .with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + the `im-a-teapot` manifest key is unstable and may not work properly in England + +Caused by: + feature `test-dummy-unstable` is required + +consider adding `cargo-features = [\"test-dummy-unstable\"]` to the manifest +")); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + the `im-a-teapot` manifest key is unstable and may not work properly in England + +Caused by: + feature `test-dummy-unstable` is required + +this Cargo does not support nightly features, but if you +switch to nightly channel you can add +`cargo-features = [\"test-dummy-unstable\"]` 
to enable this feature +")); +} + +#[test] +fn unknown_feature() { + let p = project("foo") + .file("Cargo.toml", r#" + cargo-features = ["foo"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + unknown cargo feature `foo` +")); +} + +#[test] +fn stable_feature_warns() { + let p = project("foo") + .file("Cargo.toml", r#" + cargo-features = ["test-dummy-stable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +warning: the cargo feature `test-dummy-stable` is now stable and is no longer \ +necessary to be listed in the manifest +[COMPILING] a [..] +[FINISHED] [..] +")); +} + +#[test] +fn nightly_feature_requires_nightly() { + let p = project("foo") + .file("Cargo.toml", r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build") + .masquerade_as_nightly_cargo(), + execs().with_status(0) + .with_stderr("\ +[COMPILING] a [..] +[FINISHED] [..] +")); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +")); +} + +#[test] +fn nightly_feature_requires_nightly_in_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#) + .file("a/src/lib.rs", "") + .build(); + assert_that(p.cargo("build") + .masquerade_as_nightly_cargo(), + execs().with_status(0) + .with_stderr("\ +[COMPILING] a [..] +[COMPILING] b [..] +[FINISHED] [..] +")); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to load source for a dependency on `a` + +Caused by: + Unable to update [..] + +Caused by: + failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +")); +} + +#[test] +fn cant_publish() { + let p = project("foo") + .file("Cargo.toml", r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build") + .masquerade_as_nightly_cargo(), + execs().with_status(0) + .with_stderr("\ +[COMPILING] a [..] +[FINISHED] [..] 
+")); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +")); +} + +#[test] +fn z_flags_rejected() { + let p = project("foo") + .file("Cargo.toml", r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build") + .arg("-Zprint-im-a-teapot"), + execs().with_status(101) + .with_stderr("\ +error: the `-Z` flag is only accepted on the nightly channel of Cargo +")); + + assert_that(p.cargo("build") + .masquerade_as_nightly_cargo() + .arg("-Zarg"), + execs().with_status(101) + .with_stderr("\ +error: unknown `-Z` flag specified: arg +")); + + assert_that(p.cargo("build") + .masquerade_as_nightly_cargo() + .arg("-Zprint-im-a-teapot"), + execs().with_status(0) + .with_stdout("im-a-teapot = true\n") + .with_stderr("\ +[COMPILING] a [..] +[FINISHED] [..] +")); +} + +#[test] +fn publish_rejected() { + let p = project("foo") + .file("Cargo.toml", r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("package") + .masquerade_as_nightly_cargo(), + execs().with_status(101) + .with_stderr("\ +error: cannot package or publish crates which activate nightly-only cargo features +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/cargo.rs b/collector/compile-benchmarks/cargo/tests/cargo.rs new file mode 100644 index 000000000..55de7b9c7 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/cargo.rs @@ -0,0 +1,218 @@ +extern crate cargo; +extern crate cargotest; +extern crate hamcrest; + +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; +use std::str; + +use cargotest::cargo_process; +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::{execs, project, Project, basic_bin_manifest}; +use hamcrest::{assert_that, existing_file}; + +#[cfg_attr(windows,allow(dead_code))] +enum FakeKind<'a> { + Executable, + Symlink{target:&'a Path}, +} + +/// Add an empty file with executable flags (and platform-dependent suffix). +/// TODO: move this to `Project` if other cases using this emerge. 
+fn fake_file(proj: Project, dir: &Path, name: &str, kind: &FakeKind) -> Project {
+    let path = proj.root().join(dir).join(&format!("{}{}", name,
+                                                   env::consts::EXE_SUFFIX));
+    path.parent().unwrap().mkdir_p();
+    match *kind {
+        FakeKind::Executable => {
+            File::create(&path).unwrap();
+            make_executable(&path);
+        },
+        FakeKind::Symlink{target} => {
+            make_symlink(&path,target);
+        }
+    }
+    return proj;
+
+    #[cfg(unix)]
+    fn make_executable(p: &Path) {
+        use std::os::unix::prelude::*;
+
+        let mut perms = fs::metadata(p).unwrap().permissions();
+        let mode = perms.mode();
+        perms.set_mode(mode | 0o111);
+        fs::set_permissions(p, perms).unwrap();
+    }
+    #[cfg(windows)]
+    fn make_executable(_: &Path) {}
+    #[cfg(unix)]
+    fn make_symlink(p: &Path, t: &Path) {
+        ::std::os::unix::fs::symlink(t,p).expect("Failed to create symlink");
+    }
+    #[cfg(windows)]
+    fn make_symlink(_: &Path, _: &Path) {
+        panic!("Not supported")
+    }
+}
+
+fn path() -> Vec<PathBuf> {
+    env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect()
+}
+
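+// `cargo --list` discovers external subcommands by scanning `PATH` for
+// executables named `cargo-<something>`. The tests below drop fake
+// `cargo-1`/`cargo-2` binaries into a scratch directory and prepend it to
+// `PATH` to exercise that lookup (and, on unix, symlink resolution).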
+#[test]
+fn list_command_looks_at_path() {
+    let proj = project("list-non-overlapping").build();
+    let proj = fake_file(proj, Path::new("path-test"), "cargo-1", &FakeKind::Executable);
+    let mut pr = cargo_process();
+
+    let mut path = path();
+    path.push(proj.root().join("path-test"));
+    let path = env::join_paths(path.iter()).unwrap();
+    let output = pr.arg("-v").arg("--list")
+                   .env("PATH", &path);
+    let output = output.exec_with_output().unwrap();
+    let output = str::from_utf8(&output.stdout).unwrap();
+    assert!(output.contains("\n 1\n"), "missing 1: {}", output);
+}
+
+// windows and symlinks don't currently agree that well
+#[cfg(unix)]
+#[test]
+fn list_command_resolves_symlinks() {
+    use cargotest::support::cargo_exe;
+
+    let proj = project("list-non-overlapping").build();
+    let proj = fake_file(proj, Path::new("path-test"), "cargo-2",
+                         &FakeKind::Symlink{target:&cargo_exe()});
+    let mut pr = cargo_process();
+
+    let mut path = path();
+    path.push(proj.root().join("path-test"));
+    let path = env::join_paths(path.iter()).unwrap();
+    let output = pr.arg("-v").arg("--list")
+                   .env("PATH", &path);
+    let output = output.exec_with_output().unwrap();
+    let output = str::from_utf8(&output.stdout).unwrap();
+    assert!(output.contains("\n 2\n"), "missing 2: {}", output);
+}
+
+#[test]
+fn find_closest_biuld_to_build() {
+    let mut pr = cargo_process();
+    pr.arg("biuld");
+
+    assert_that(pr,
+                execs().with_status(101)
+                       .with_stderr("[ERROR] no such subcommand: `biuld`
+
+Did you mean `build`?
+
+"));
+}
+
+// if a subcommand is more than 3 edit distance away, we don't make a suggestion
+#[test]
+fn find_closest_dont_correct_nonsense() {
+    let mut pr = cargo_process();
+    pr.arg("there-is-no-way-that-there-is-a-command-close-to-this")
+      .cwd(&paths::root());
+
+    assert_that(pr,
+                execs().with_status(101)
+                       .with_stderr("[ERROR] no such subcommand: \
+                    `there-is-no-way-that-there-is-a-command-close-to-this`
+"));
+}
+
+#[test]
+fn displays_subcommand_on_error() {
+    let mut pr = cargo_process();
+    pr.arg("invalid-command");
+
+    assert_that(pr,
+                execs().with_status(101)
+                       .with_stderr("[ERROR] no such subcommand: `invalid-command`
+"));
+}
+
+#[test]
+fn override_cargo_home() {
+    let root = paths::root();
+    let my_home = root.join("my_home");
+    fs::create_dir(&my_home).unwrap();
+    File::create(&my_home.join("config")).unwrap().write_all(br#"
+        [cargo-new]
+        name = "foo"
+        email = "bar"
+        git = false
+    "#).unwrap();
+
+    assert_that(cargo_process()
+                    .arg("new").arg("foo")
+                    .env("USER", "foo")
+                    .env("CARGO_HOME", &my_home),
+                execs().with_status(0));
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["foo <bar>"]"#));
+}
+
+#[test]
+fn cargo_subcommand_env() {
+    use cargotest::support::cargo_exe;
+
+    let src = format!(r#"
+        use std::env;
+
+        fn main() {{
+            println!("{{}}", env::var("{}").unwrap());
+        }}
+        "#, cargo::CARGO_ENV);
+
+    let p = project("cargo-envtest")
+        .file("Cargo.toml", &basic_bin_manifest("cargo-envtest"))
+        .file("src/main.rs", &src)
+        .build();
+
+    let target_dir = p.target_debug_dir();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.bin("cargo-envtest"), existing_file());
+
+    let mut pr = cargo_process();
+    let cargo = cargo_exe().canonicalize().unwrap();
+    let mut path = path();
+    path.push(target_dir);
+    let path = env::join_paths(path.iter()).unwrap();
+
+    assert_that(pr.arg("envtest").env("PATH", &path),
+                execs().with_status(0).with_stdout(cargo.to_str().unwrap()));
+}
+
+#[test]
+fn cargo_help() {
+    assert_that(cargo_process(),
+                execs().with_status(0));
+    assert_that(cargo_process().arg("help"),
+                execs().with_status(0));
+    assert_that(cargo_process().arg("-h"),
+                execs().with_status(0));
+    assert_that(cargo_process().arg("help").arg("build"),
+                execs().with_status(0));
+    assert_that(cargo_process().arg("build").arg("-h"),
+                execs().with_status(0));
+    assert_that(cargo_process().arg("help").arg("-h"),
+                execs().with_status(0));
+    assert_that(cargo_process().arg("help").arg("help"),
+                execs().with_status(0));
+}
+
+#[test]
+fn explain() {
+    assert_that(cargo_process().arg("--explain").arg("E0001"),
+                execs().with_status(0));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cargo_alias_config.rs b/collector/compile-benchmarks/cargo/tests/cargo_alias_config.rs
new file mode 100644
index 000000000..0f1a95834
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargo_alias_config.rs
@@ -0,0 +1,123 @@
+extern crate cargotest;
+extern crate hamcrest;
+use cargotest::support::{project, execs, basic_bin_manifest};
+use hamcrest::{assert_that};
+
+#[test]
+fn alias_incorrect_config_type() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", r#"
+        fn main() {
+        }"#)
+        .file(".cargo/config",r#"
+            [alias]
+            b-cargo-test = 5
+        "#)
+        .build();
+
+    assert_that(p.cargo("b-cargo-test").arg("-v"),
+                execs().with_status(101).
+                with_stderr_contains("[ERROR] invalid configuration \
+for key `alias.b-cargo-test`
+expected a list, but found a integer for [..]"));
+}
+
+
+#[test]
+fn alias_default_config_overrides_config() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", r#"
+        fn main() {
+        }"#)
+        .file(".cargo/config",r#"
+            [alias]
+            b = "not_build"
+        "#)
+        .build();
+
+    assert_that(p.cargo("b").arg("-v"),
+                execs().with_status(0).
+                with_stderr_contains("[COMPILING] foo v0.5.0 [..]"));
+}
+
+#[test]
+fn alias_config() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", r#"
+        fn main() {
+        }"#)
+        .file(".cargo/config",r#"
+            [alias]
+            b-cargo-test = "build"
+        "#)
+        .build();
+
+    assert_that(p.cargo("b-cargo-test").arg("-v"),
+                execs().with_status(0).
+                with_stderr_contains("[COMPILING] foo v0.5.0 [..]
+[RUNNING] `rustc --crate-name foo [..]"));
+}
+
+#[test]
+fn alias_list_test() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", r#"
+        fn main() {
+        }"#)
+        .file(".cargo/config",r#"
+            [alias]
+            b-cargo-test = ["build", "--release"]
+        "#)
+        .build();
+
+    assert_that(p.cargo("b-cargo-test").arg("-v"),
+                execs().with_status(0).
+                with_stderr_contains("[COMPILING] foo v0.5.0 [..]").
+                with_stderr_contains("[RUNNING] `rustc --crate-name [..]")
+    );
+}
+
+#[test]
+fn alias_with_flags_config() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", r#"
+        fn main() {
+        }"#)
+        .file(".cargo/config",r#"
+            [alias]
+            b-cargo-test = "build --release"
+        "#)
+        .build();
+
+    assert_that(p.cargo("b-cargo-test").arg("-v"),
+                execs().with_status(0).
+                with_stderr_contains("[COMPILING] foo v0.5.0 [..]").
+                with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]")
+    );
+}
+
+#[test]
+fn cant_shadow_builtin() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", r#"
+        fn main() {
+        }"#)
+        .file(".cargo/config",r#"
+            [alias]
+            build = "fetch"
+        "#)
+        .build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(0)
+                       .with_stderr("\
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+"));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/Cargo.toml b/collector/compile-benchmarks/cargo/tests/cargotest/Cargo.toml
new file mode 100644
index 000000000..fda9ddb53
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "cargotest"
+version = "0.1.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+
+[lib]
+path = "lib.rs"
+
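+# `cargotest` is the shared test-support library: the integration suites
+# under tests/ link against it, and it in turn depends on the `cargo`
+# library itself via the path dependency below.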
+[dependencies]
+cargo = { path = "../.." }
+filetime = "0.1"
+flate2 = "0.2"
+git2 = { version = "0.6", default-features = false }
+hamcrest = "=0.1.1"
+hex = "0.2"
+log = "0.3"
+serde_json = "1.0"
+tar = { version = "0.4", default-features = false }
+url = "1.1"
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/install.rs b/collector/compile-benchmarks/cargo/tests/cargotest/install.rs
new file mode 100644
index 000000000..12a396972
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/install.rs
@@ -0,0 +1,30 @@
+use std::fmt;
+use std::path::{PathBuf, Path};
+
+use hamcrest::{Matcher, MatchResult, existing_file};
+use support::paths;
+
+pub use self::InstalledExe as has_installed_exe;
+
+pub fn cargo_home() -> PathBuf {
+    paths::home().join(".cargo")
+}
+
+pub struct InstalledExe(pub &'static str);
+
+fn exe(name: &str) -> String {
+    if cfg!(windows) {format!("{}.exe", name)} else {name.to_string()}
+}
+
+impl<P: AsRef<Path>> Matcher<P> for InstalledExe {
+    fn matches(&self, path: P) -> MatchResult {
+        let path = path.as_ref().join("bin").join(exe(self.0));
+        existing_file().matches(&path)
+    }
+}
+
+impl fmt::Display for InstalledExe {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "installed exe `{}`", self.0)
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/lib.rs b/collector/compile-benchmarks/cargo/tests/cargotest/lib.rs
new file mode 100644
index 000000000..09f17b20e
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/lib.rs
@@ -0,0 +1,90 @@
+#![deny(warnings)]
+
+extern crate cargo;
+extern crate filetime;
+extern crate flate2;
+extern crate git2;
+extern crate hamcrest;
+extern crate hex;
+#[macro_use]
+extern crate serde_json;
+extern crate tar;
+extern crate url;
+
+use std::ffi::OsStr;
+use std::time::Duration;
+
+use cargo::util::Rustc;
+use std::path::PathBuf;
+
+pub mod support;
+pub mod install;
+
+thread_local!(pub static RUSTC: Rustc = Rustc::new(PathBuf::from("rustc"), None).unwrap());
+
+pub fn rustc_host() -> String {
+    RUSTC.with(|r| r.host.clone())
+}
+
+pub fn is_nightly() -> bool {
+    RUSTC.with(|r| {
+        r.verbose_version.contains("-nightly") ||
+            r.verbose_version.contains("-dev")
+    })
+}
+
+pub fn process<T: AsRef<OsStr>>(t: T) -> cargo::util::ProcessBuilder {
+    _process(t.as_ref())
+}
+
+fn _process(t: &OsStr) -> cargo::util::ProcessBuilder {
+    let mut p = cargo::util::process(t);
+    p.cwd(&support::paths::root())
+     .env_remove("CARGO_HOME")
+     .env("HOME", support::paths::home())
+     .env("CARGO_HOME", support::paths::home().join(".cargo"))
+     .env("__CARGO_TEST_ROOT", support::paths::root())
+
+     // Force cargo to think it's on the stable channel for all tests, this
+     // should hopefully not surprise us as we add cargo features over time and
+     // cargo rides the trains
+     .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable")
+
+     .env_remove("__CARGO_DEFAULT_LIB_METADATA")
+     .env_remove("RUSTC")
+     .env_remove("RUSTDOC")
+     .env_remove("RUSTC_WRAPPER")
+     .env_remove("RUSTFLAGS")
+     .env_remove("CARGO_INCREMENTAL")
+     .env_remove("XDG_CONFIG_HOME")      // see #2345
+     .env("GIT_CONFIG_NOSYSTEM", "1")    // keep trying to sandbox ourselves
+     .env_remove("EMAIL")
+     .env_remove("MFLAGS")
+     .env_remove("MAKEFLAGS")
+     .env_remove("CARGO_MAKEFLAGS")
+     .env_remove("GIT_AUTHOR_NAME")
+     .env_remove("GIT_AUTHOR_EMAIL")
+     .env_remove("GIT_COMMITTER_NAME")
+     .env_remove("GIT_COMMITTER_EMAIL")
+     .env_remove("CARGO_TARGET_DIR")     // we assume 'target'
+     .env_remove("MSYSTEM");             // assume cmd.exe everywhere on windows
+    return p
+}
+
+pub trait ChannelChanger: Sized {
+    fn masquerade_as_nightly_cargo(&mut self) -> &mut Self;
+}
+
+impl ChannelChanger for cargo::util::ProcessBuilder {
+    fn masquerade_as_nightly_cargo(&mut self) -> &mut Self {
+        self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
+    }
+}
+
+pub fn cargo_process() -> cargo::util::ProcessBuilder {
+    process(&support::cargo_exe())
+}
+
+pub fn sleep_ms(ms: u64) {
+    std::thread::sleep(Duration::from_millis(ms));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/support/cross_compile.rs b/collector/compile-benchmarks/cargo/tests/cargotest/support/cross_compile.rs
new file mode 100644
index 000000000..fc274cb42
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/support/cross_compile.rs
@@ -0,0 +1,131 @@
+use std::env;
+use std::process::Command;
+use std::sync::{Once, ONCE_INIT};
+use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering};
+
+use support::{project, main_file, basic_bin_manifest};
+
+pub fn disabled() -> bool {
+    // First, disable if ./configure requested so
+    match env::var("CFG_DISABLE_CROSS_TESTS") {
+        Ok(ref s) if *s == "1" => return true,
+        _ => {}
+    }
+
+    // Right now the windows bots cannot cross compile due to the mingw setup,
+    // so we disable ourselves on all but macos/linux setups where the rustc
+    // install script ensures we have both architectures
+    if !(cfg!(target_os = "macos") ||
+         cfg!(target_os = "linux") ||
+         cfg!(target_env = "msvc")) {
+        return true;
+    }
+
+    // It's not particularly common to have a cross-compilation setup, so
+    // try to detect that before we fail a bunch of tests through no fault
+    // of the user.
+    static CAN_RUN_CROSS_TESTS: AtomicBool = ATOMIC_BOOL_INIT;
+    static CHECK: Once = ONCE_INIT;
+
+    let cross_target = alternate();
+
+    CHECK.call_once(|| {
+        let p = project("cross_test")
+            .file("Cargo.toml", &basic_bin_manifest("cross_test"))
+            .file("src/cross_test.rs", &main_file(r#""testing!""#, &[]))
+            .build();
+
+        let result = p.cargo("build")
+            .arg("--target").arg(&cross_target)
+            .exec_with_output();
+
+        if result.is_ok() {
+            CAN_RUN_CROSS_TESTS.store(true, Ordering::SeqCst);
+        }
+    });
+
+    if CAN_RUN_CROSS_TESTS.load(Ordering::SeqCst) {
+        // We were able to compile a simple project, so the user has the
+        // necessary std:: bits installed. Therefore, tests should not
+        // be disabled.
+        return false;
+    }
+
+    // We can't compile a simple cross project. We want to warn the user
+    // by failing a single test and having the remainder of the cross tests
+    // pass. We don't use std::sync::Once here because panicking inside its
+    // call_once method would poison the Once instance, which is not what
+    // we want.
+    static HAVE_WARNED: AtomicBool = ATOMIC_BOOL_INIT;
+
+    if HAVE_WARNED.swap(true, Ordering::SeqCst) {
+        // We are some other test and somebody else is handling the warning.
+        // Just disable the current test.
+        return true;
+    }
+
+    // We are responsible for warning the user, which we do by panicking.
+    let rustup_available = Command::new("rustup").output().is_ok();
+
+    let linux_help = if cfg!(target_os = "linux") {
+        "
+
+You may need to install runtime libraries for your Linux distribution as well.".to_string()
+    } else {
+        "".to_string()
+    };
+
+    let rustup_help = if rustup_available {
+        format!("
+
+Alternatively, you can install the necessary libraries for cross-compilation with
+
+    rustup target add {}{}", cross_target, linux_help)
+    } else {
+        "".to_string()
+    };
+
+    panic!("Cannot cross compile to {}.
+
+This failure can be safely ignored. If you would prefer to not see this
+failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\".{}
+", cross_target, rustup_help);
+}
+
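+// The "alternate" target is the host triple with the CPU architecture
+// flipped (x86 <-> x86_64), i.e. a plausible cross-compilation target for
+// the host OS; `disabled()` above probes whether it actually works.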
+pub fn alternate() -> String {
+    let platform = match env::consts::OS {
+        "linux" => "unknown-linux-gnu",
+        "macos" => "apple-darwin",
+        "windows" => "pc-windows-msvc",
+        _ => unreachable!(),
+    };
+    let arch = match env::consts::ARCH {
+        "x86" => "x86_64",
+        "x86_64" => "i686",
+        _ => unreachable!(),
+    };
+    format!("{}-{}", arch, platform)
+}
+
+pub fn alternate_arch() -> &'static str {
+    match env::consts::ARCH {
+        "x86" => "x86_64",
+        "x86_64" => "x86",
+        _ => unreachable!(),
+    }
+}
+
+pub fn host() -> String {
+    let platform = match env::consts::OS {
+        "linux" => "unknown-linux-gnu",
+        "macos" => "apple-darwin",
+        "windows" => "pc-windows-msvc",
+        _ => unreachable!(),
+    };
+    let arch = match env::consts::ARCH {
+        "x86" => "i686",
+        "x86_64" => "x86_64",
+        _ => unreachable!(),
+    };
+    format!("{}-{}", arch, platform)
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/support/git.rs b/collector/compile-benchmarks/cargo/tests/cargotest/support/git.rs
new file mode 100644
index 000000000..439457c23
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/support/git.rs
@@ -0,0 +1,142 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+use url::Url;
+use git2;
+
+use cargo::util::ProcessError;
+use support::{ProjectBuilder, Project, project, path2url};
+
+#[must_use]
+pub struct RepoBuilder {
+    repo: git2::Repository,
+    files: Vec<PathBuf>,
+}
+
+pub struct Repository(git2::Repository);
+
+pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) }
+
+impl RepoBuilder {
+    pub fn init(p: &Path) -> RepoBuilder {
+        t!(fs::create_dir_all(p.parent().unwrap()));
+        let repo = t!(git2::Repository::init(p));
+        {
+            let mut config = t!(repo.config());
+            t!(config.set_str("user.name", "name"));
+            t!(config.set_str("user.email", "email"));
+        }
+        RepoBuilder { repo: repo, files: Vec::new() }
+    }
+
+    pub fn file(self, path: &str, contents: &str) -> RepoBuilder {
+        let mut me = self.nocommit_file(path, contents);
+        me.files.push(PathBuf::from(path));
+        me
+    }
+
+    pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder {
+        let dst = self.repo.workdir().unwrap().join(path);
+        t!(fs::create_dir_all(dst.parent().unwrap()));
+        t!(t!(File::create(&dst)).write_all(contents.as_bytes()));
+        self
+    }
+
+    pub fn build(self) -> Repository {
+        {
+            let mut index = t!(self.repo.index());
+            for file in self.files.iter() {
+                t!(index.add_path(file));
+            }
+            t!(index.write());
+            let id = t!(index.write_tree());
+            let tree = t!(self.repo.find_tree(id));
+            let sig = t!(self.repo.signature());
+            t!(self.repo.commit(Some("HEAD"), &sig, &sig,
+                                "Initial commit", &tree, &[]));
+        }
+        let RepoBuilder{ repo, .. } = self;
+        Repository(repo)
+    }
+}
+
+impl Repository {
+    pub fn root(&self) -> &Path {
+        self.0.workdir().unwrap()
+    }
+
+    pub fn url(&self) -> Url {
+        path2url(self.0.workdir().unwrap().to_path_buf())
+    }
+}
+
+pub fn new<F>(name: &str, callback: F) -> Result<Project, ProcessError>
+    where F: FnOnce(ProjectBuilder) -> ProjectBuilder
+{
+    let mut git_project = project(name);
+    git_project = callback(git_project);
+    let git_project = git_project.build();
+
+    let repo = t!(git2::Repository::init(&git_project.root()));
+    let mut cfg = t!(repo.config());
+    t!(cfg.set_str("user.email", "foo@bar.com"));
+    t!(cfg.set_str("user.name", "Foo Bar"));
+    drop(cfg);
+    add(&repo);
+    commit(&repo);
+    Ok(git_project)
+}
+
+pub fn add(repo: &git2::Repository) {
+    // FIXME(libgit2/libgit2#2514): apparently add_all will add all submodules
+    // as well, and then fail b/c they're a directory. As a stopgap, we just
+    // ignore all submodules.
+    let mut s = t!(repo.submodules());
+    for submodule in s.iter_mut() {
+        t!(submodule.add_to_index(false));
+    }
+    let mut index = t!(repo.index());
+    t!(index.add_all(["*"].iter(), git2::ADD_DEFAULT,
+                     Some(&mut (|a, _b| {
+        if s.iter().any(|s| a.starts_with(s.path())) {1} else {0}
+    }))));
+    t!(index.write());
+}
+
+pub fn add_submodule<'a>(repo: &'a git2::Repository, url: &str,
+                         path: &Path) -> git2::Submodule<'a>
+{
+    let path = path.to_str().unwrap().replace(r"\", "/");
+    let mut s = t!(repo.submodule(url, Path::new(&path), false));
+    let subrepo = t!(s.open());
+    t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*"));
+    let mut origin = t!(subrepo.find_remote("origin"));
+    t!(origin.fetch(&[], None, None));
+    t!(subrepo.checkout_head(None));
+    t!(s.add_finalize());
+    return s;
+}
+
+pub fn commit(repo: &git2::Repository) -> git2::Oid {
+    let tree_id = t!(t!(repo.index()).write_tree());
+    let sig = t!(repo.signature());
+    let mut parents = Vec::new();
+    match repo.head().ok().map(|h| h.target().unwrap()) {
+        Some(parent) => parents.push(t!(repo.find_commit(parent))),
+        None => {}
+    }
+    let parents = parents.iter().collect::<Vec<_>>();
+    t!(repo.commit(Some("HEAD"), &sig, &sig, "test",
+                   &t!(repo.find_tree(tree_id)),
+                   &parents))
+}
+
+pub fn tag(repo: &git2::Repository, name: &str) {
+    let head = repo.head().unwrap().target().unwrap();
+    t!(repo.tag(name,
+                &t!(repo.find_object(head, None)),
+                &t!(repo.signature()),
+                "make a new tag",
+                false));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/support/mod.rs b/collector/compile-benchmarks/cargo/tests/cargotest/support/mod.rs
new file mode 100644
index 000000000..4de27a48b
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/support/mod.rs
@@ -0,0 +1,873 @@
+use std::env;
+use std::error::Error;
+use std::ffi::OsStr;
+use std::fmt;
+use std::fs;
+use std::io::prelude::*;
+use std::os;
+use std::path::{Path, PathBuf};
+use std::process::Output;
+use std::str;
+use std::usize;
+
+use serde_json::{self, Value};
+use url::Url;
+use hamcrest as ham;
+use cargo::util::ProcessBuilder;
+use cargo::util::{CargoError, CargoErrorKind, ProcessError};
+
+use support::paths::CargoPathExt;
+
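+// `t!` unwraps a `Result`, panicking with both the stringified expression
+// and the error value so a failing call points at its exact call site; the
+// support modules use it pervasively in place of `.unwrap()`.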
+#[macro_export]
+macro_rules! t {
+    ($e:expr) => (match $e {
+        Ok(e) => e,
+        Err(e) => panic!("{} failed with {}", stringify!($e), e),
+    })
+}
+
+pub mod paths;
+pub mod git;
+pub mod registry;
+pub mod cross_compile;
+pub mod publish;
+
+/*
+ *
+ * ===== Builders =====
+ *
+ */
+
+#[derive(PartialEq,Clone)]
+struct FileBuilder {
+    path: PathBuf,
+    body: String
+}
+
+impl FileBuilder {
+    pub fn new(path: PathBuf, body: &str) -> FileBuilder {
+        FileBuilder { path: path, body: body.to_string() }
+    }
+
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+
+        let mut file = fs::File::create(&self.path).unwrap_or_else(|e| {
+            panic!("could not create file {}: {}", self.path.display(), e)
+        });
+
+        t!(file.write_all(self.body.as_bytes()));
+    }
+
+    fn dirname(&self) -> &Path {
+        self.path.parent().unwrap()
+    }
+}
+
+#[derive(PartialEq,Clone)]
+struct SymlinkBuilder {
+    dst: PathBuf,
+    src: PathBuf,
+}
+
+impl SymlinkBuilder {
+    pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
+        SymlinkBuilder { dst: dst, src: src }
+    }
+
+    #[cfg(unix)]
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+        t!(os::unix::fs::symlink(&self.dst, &self.src));
+    }
+
+    #[cfg(windows)]
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+        t!(os::windows::fs::symlink_file(&self.dst, &self.src));
+    }
+
+    fn dirname(&self) -> &Path {
+        self.src.parent().unwrap()
+    }
+}
+
+#[derive(PartialEq,Clone)]
+pub struct Project{
+    root: PathBuf,
+}
+
+#[must_use]
+#[derive(PartialEq,Clone)]
+pub struct ProjectBuilder {
+    name: String,
+    root: Project,
+    files: Vec<FileBuilder>,
+    symlinks: Vec<SymlinkBuilder>,
+}
+
+impl ProjectBuilder {
+    pub fn root(&self) -> PathBuf {
+        self.root.root()
+    }
+
+    pub fn build_dir(&self) -> PathBuf {
+        self.root.build_dir()
+    }
+
+    pub fn target_debug_dir(&self) -> PathBuf {
+        self.root.target_debug_dir()
+    }
+
+    pub fn new(name: &str, root: PathBuf) -> ProjectBuilder {
+        ProjectBuilder {
+            name: name.to_string(),
+            root: Project{ root },
+            files: vec![],
+            symlinks: vec![],
+        }
+    }
+
+    pub fn file<B: AsRef<Path>>(mut self, path: B,
+                                body: &str) -> Self {
+        self._file(path.as_ref(), body);
+        self
+    }
+
+    fn _file(&mut self, path: &Path, body: &str) {
+        self.files.push(FileBuilder::new(self.root.root.join(path), body));
+    }
+
+    pub fn symlink<T: AsRef<Path>>(mut self, dst: T,
+                                   src: T) -> Self {
+        self.symlinks.push(SymlinkBuilder::new(self.root.root.join(dst),
+                                               self.root.root.join(src)));
+        self
+    }
+
+    pub fn build(self) -> Project {
+        // First, clean the directory if it already exists
+        self.rm_root();
+
+        // Create the empty directory
+        self.root.root.mkdir_p();
+
+        for file in self.files.iter() {
+            file.mk();
+        }
+
+        for symlink in self.symlinks.iter() {
+            symlink.mk();
+        }
+
+        let ProjectBuilder{ name: _, root, files: _, symlinks: _, .. } = self;
+        root
+    }
+
+    fn rm_root(&self) {
+        self.root.root.rm_rf()
+    }
+}
+
+impl Project {
+    pub fn root(&self) -> PathBuf {
+        self.root.clone()
+    }
+
+    pub fn build_dir(&self) -> PathBuf {
+        self.root.join("target")
+    }
+
+    pub fn target_debug_dir(&self) -> PathBuf {
+        self.build_dir().join("debug")
+    }
+
+    pub fn url(&self) -> Url { path2url(self.root()) }
+
+    pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf {
+        let prefix = Project::get_lib_prefix(kind);
+
+        let extension = Project::get_lib_extension(kind);
+
+        let lib_file_name = format!("{}{}.{}",
+                                    prefix,
+                                    name,
+                                    extension);
+
+        self.target_debug_dir()
+            .join("examples")
+            .join(&lib_file_name)
+    }
+
+    pub fn bin(&self, b: &str) -> PathBuf {
+        self.build_dir().join("debug").join(&format!("{}{}", b,
+                                                     env::consts::EXE_SUFFIX))
+    }
+
+    pub fn release_bin(&self, b: &str) -> PathBuf {
+        self.build_dir().join("release").join(&format!("{}{}", b,
+                                                       env::consts::EXE_SUFFIX))
+    }
+
+    pub fn target_bin(&self, target: &str, b: &str) -> PathBuf {
+        self.build_dir().join(target).join("debug")
+            .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
+    }
+
+    pub fn change_file(&self, path: &str, body: &str) {
+        FileBuilder::new(self.root.join(path), body).mk()
+    }
+
+    pub fn process<T: AsRef<OsStr>>(&self, program: T) -> ProcessBuilder {
+        let mut p = ::process(program);
+        p.cwd(self.root());
+        return p
+    }
+
+    pub fn cargo(&self, cmd: &str) -> ProcessBuilder {
+        let mut p = self.process(&cargo_exe());
+        p.arg(cmd);
+        return p;
+    }
+
+    pub fn read_lockfile(&self) -> String {
+        let mut buffer = String::new();
+        fs::File::open(self.root().join("Cargo.lock")).unwrap()
+            .read_to_string(&mut buffer).unwrap();
+        buffer
+    }
+
+    fn get_lib_prefix(kind: &str) -> &str {
+        match kind {
+            "lib" | "rlib" => "lib",
+            "staticlib" | "dylib" | "proc-macro" => {
+                if cfg!(windows) {
+                    ""
+                } else {
+                    "lib"
+                }
+            }
+            _ => unreachable!()
+        }
+    }
+
+    fn get_lib_extension(kind: &str) -> &str {
+        match kind {
+            "lib" | "rlib" => "rlib",
+            "staticlib" => {
+                if cfg!(windows) {
+                    "lib"
+                } else {
+                    "a"
+                }
+            }
+            "dylib" | "proc-macro" => {
+                if cfg!(windows) {
+                    "dll"
+                } else if cfg!(target_os="macos") {
+                    "dylib"
+                } else {
+                    "so"
+                }
+            }
+            _ => unreachable!()
+        }
+    }
+}
+
+// Generates a project layout
+pub fn project(name: &str) -> ProjectBuilder {
+    ProjectBuilder::new(name, paths::root().join(name))
+}
+
+// Generates a project layout inside our fake home dir
+pub fn project_in_home(name: &str) -> ProjectBuilder {
+    ProjectBuilder::new(name, paths::home().join(name))
+}
+
+// === Helpers ===
+
+pub fn main_file(println: &str, deps: &[&str]) -> String {
+    let mut buf = String::new();
+
+    for dep in deps.iter() {
+        buf.push_str(&format!("extern crate {};\n", dep));
+    }
+
+    buf.push_str("fn main() { println!(");
+    buf.push_str(&println);
+    buf.push_str("); }\n");
+
+    buf.to_string()
+}
+
+trait ErrMsg<T> {
+    fn with_err_msg(self, val: String) -> Result<T, String>;
+}
+
+impl<T, E: fmt::Display> ErrMsg<T> for Result<T, E> {
+    fn with_err_msg(self, val: String) -> Result<T, String> {
+        match self {
+            Ok(val) => Ok(val),
+            Err(err) => Err(format!("{}; original={}", val, err))
+        }
+    }
+}
+
+// Path to cargo executables
+pub fn cargo_dir() -> PathBuf {
+    env::var_os("CARGO_BIN_PATH").map(PathBuf::from).or_else(|| {
+        env::current_exe().ok().map(|mut path| {
+            path.pop();
+            if path.ends_with("deps") {
+                path.pop();
+            }
+            path
+        })
+    }).unwrap_or_else(|| {
+        panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test")
+    })
+}
+
+pub fn cargo_exe() -> PathBuf {
+    cargo_dir().join(format!("cargo{}", env::consts::EXE_SUFFIX))
+}
+
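+// The matchers below drive all of the integration tests: an `Execs` records
+// expected exit status and stdout/stderr patterns, and `assert_that` then
+// runs the real cargo process and compares its output, with `[..]` acting
+// as a within-line wildcard (see `lines_match`).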
+/// Returns an absolute path in the filesystem that `path` points to. The
+/// returned path does not contain any symlinks in its hierarchy.
+/*
+ *
+ * ===== Matchers =====
+ *
+ */
+
+#[derive(Clone)]
+pub struct Execs {
+    expect_stdout: Option<String>,
+    expect_stdin: Option<String>,
+    expect_stderr: Option<String>,
+    expect_exit_code: Option<i32>,
+    expect_stdout_contains: Vec<String>,
+    expect_stderr_contains: Vec<String>,
+    expect_stdout_contains_n: Vec<(String, usize)>,
+    expect_stdout_not_contains: Vec<String>,
+    expect_stderr_not_contains: Vec<String>,
+    expect_json: Option<Vec<Value>>,
+}
+
+impl Execs {
+    pub fn with_stdout<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stdout = Some(expected.to_string());
+        self
+    }
+
+    pub fn with_stderr<S: ToString>(mut self, expected: S) -> Execs {
+        self._with_stderr(&expected);
+        self
+    }
+
+    fn _with_stderr(&mut self, expected: &ToString) {
+        self.expect_stderr = Some(expected.to_string());
+    }
+
+    pub fn with_status(mut self, expected: i32) -> Execs {
+        self.expect_exit_code = Some(expected);
+        self
+    }
+
+    pub fn with_stdout_contains<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stdout_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_stderr_contains<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stderr_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_stdout_contains_n<S: ToString>(mut self, expected: S, number: usize) -> Execs {
+        self.expect_stdout_contains_n.push((expected.to_string(), number));
+        self
+    }
+
+    pub fn with_stdout_does_not_contain<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stdout_not_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_stderr_does_not_contain<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stderr_not_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_json(mut self, expected: &str) -> Execs {
+        self.expect_json = Some(expected.split("\n\n").map(|obj| {
+            obj.parse().unwrap()
+        }).collect());
+        self
+    }
+
+    fn match_output(&self, actual: &Output) -> ham::MatchResult {
+        self.match_status(actual)
+            .and(self.match_stdout(actual))
+            .and(self.match_stderr(actual))
+    }
+
+    fn match_status(&self, actual: &Output) -> ham::MatchResult {
+        match self.expect_exit_code {
+            None => ham::success(),
+            Some(code) => {
+                ham::expect(
+                    actual.status.code() == Some(code),
+                    format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}",
+                            actual.status,
+                            String::from_utf8_lossy(&actual.stdout),
+                            String::from_utf8_lossy(&actual.stderr)))
+            }
+        }
+    }
+
+    fn match_stdout(&self, actual: &Output) -> ham::MatchResult {
+        self.match_std(self.expect_stdout.as_ref(), &actual.stdout,
+                       "stdout", &actual.stderr, MatchKind::Exact)?;
+        for expect in self.expect_stdout_contains.iter() {
+            self.match_std(Some(expect), &actual.stdout, "stdout",
+                           &actual.stderr, MatchKind::Partial)?;
+        }
+        for expect in self.expect_stderr_contains.iter() {
+            self.match_std(Some(expect), &actual.stderr, "stderr",
+                           &actual.stdout, MatchKind::Partial)?;
+        }
+        for &(ref expect, number) in self.expect_stdout_contains_n.iter() {
+            self.match_std(Some(&expect), &actual.stdout, "stdout",
+                           &actual.stderr, MatchKind::PartialN(number))?;
+        }
+        for expect in self.expect_stdout_not_contains.iter() {
+            self.match_std(Some(expect), &actual.stdout, "stdout",
+                           &actual.stderr, MatchKind::NotPresent)?;
+        }
+        for expect in self.expect_stderr_not_contains.iter() {
+            self.match_std(Some(expect), &actual.stderr, "stderr",
+                           &actual.stdout, MatchKind::NotPresent)?;
+        }
+
+        if let Some(ref objects) = self.expect_json {
+            let stdout = str::from_utf8(&actual.stdout)
+                .map_err(|_| "stdout was not utf8 encoded".to_owned())?;
+            let lines = stdout.lines().collect::<Vec<_>>();
+            if lines.len() != objects.len() {
+                return Err(format!("expected {} json lines, got {}, stdout:\n{}",
+                                   objects.len(), lines.len(), stdout));
+            }
+            for (obj, line) in objects.iter().zip(lines) {
+                self.match_json(obj, line)?;
+            }
+        }
+        Ok(())
+    }
+
+    fn match_stderr(&self, actual: &Output) -> ham::MatchResult {
+        self.match_std(self.expect_stderr.as_ref(), &actual.stderr,
+                       "stderr", &actual.stdout, MatchKind::Exact)
+    }
+
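+    // `match_std` is the single comparison primitive: Exact diffs the whole
+    // stream, Partial looks for the expected block anywhere in the output,
+    // PartialN requires exactly `n` matches, and NotPresent asserts absence.
+    // Carriage returns are stripped and tabs printed as `<tab>` first, so
+    // expectations can be written portably.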
+    fn match_std(&self, expected: Option<&String>, actual: &[u8],
+                 description: &str, extra: &[u8],
+                 kind: MatchKind) -> ham::MatchResult {
+        let out = match expected {
+            Some(out) => out,
+            None => return ham::success(),
+        };
+        let actual = match str::from_utf8(actual) {
+            Err(..) => return Err(format!("{} was not utf8 encoded",
+                                          description)),
+            Ok(actual) => actual,
+        };
+        // Let's not deal with \r\n vs \n on windows...
+        let actual = actual.replace("\r", "");
+        let actual = actual.replace("\t", "<tab>");
+
+        match kind {
+            MatchKind::Exact => {
+                let a = actual.lines();
+                let e = out.lines();
+
+                let diffs = self.diff_lines(a, e, false);
+                ham::expect(diffs.is_empty(),
+                            format!("differences:\n\
+                                    {}\n\n\
+                                    other output:\n\
+                                    `{}`", diffs.join("\n"),
+                                    String::from_utf8_lossy(extra)))
+            }
+            MatchKind::Partial => {
+                let mut a = actual.lines();
+                let e = out.lines();
+
+                let mut diffs = self.diff_lines(a.clone(), e.clone(), true);
+                while let Some(..) = a.next() {
+                    let a = self.diff_lines(a.clone(), e.clone(), true);
+                    if a.len() < diffs.len() {
+                        diffs = a;
+                    }
+                }
+                ham::expect(diffs.is_empty(),
+                            format!("expected to find:\n\
+                                    {}\n\n\
+                                    did not find in output:\n\
+                                    {}", out,
+                                    actual))
+            }
+            MatchKind::PartialN(number) => {
+                let mut a = actual.lines();
+                let e = out.lines();
+
+                let mut matches = 0;
+
+                while let Some(..) = {
+                    if self.diff_lines(a.clone(), e.clone(), true).is_empty() {
+                        matches += 1;
+                    }
+                    a.next()
+                } {}
+
+                ham::expect(matches == number,
+                            format!("expected to find {} occurrences:\n\
+                                    {}\n\n\
+                                    did not find in output:\n\
+                                    {}", number, out,
+                                    actual))
+            }
+            MatchKind::NotPresent => {
+                ham::expect(!actual.contains(out),
+                            format!("expected not to find:\n\
+                                    {}\n\n\
+                                    but found in output:\n\
+                                    {}", out,
+                                    actual))
+            }
+        }
+    }
+
+    fn match_json(&self, expected: &Value, line: &str) -> ham::MatchResult {
+        let actual = match line.parse() {
+            Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)),
+            Ok(actual) => actual,
+        };
+
+        match find_mismatch(expected, &actual) {
+            Some((expected_part, actual_part)) => Err(format!(
+                "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+                serde_json::to_string_pretty(expected).unwrap(),
+                serde_json::to_string_pretty(&actual).unwrap(),
+                serde_json::to_string_pretty(expected_part).unwrap(),
+                serde_json::to_string_pretty(actual_part).unwrap(),
+            )),
+            None => Ok(()),
+        }
+    }
+
+    fn diff_lines<'a>(&self, actual: str::Lines<'a>, expected: str::Lines<'a>,
+                      partial: bool) -> Vec<String> {
+        let actual = actual.take(if partial {
+            expected.clone().count()
+        } else {
+            usize::MAX
+        });
+        zip_all(actual, expected).enumerate().filter_map(|(i, (a,e))| {
+            match (a, e) {
+                (Some(a), Some(e)) => {
+                    if lines_match(&e, &a) {
+                        None
+                    } else {
+                        Some(format!("{:3} - |{}|\n    + |{}|\n", i, e, a))
+                    }
+                },
+                (Some(a), None) => {
+                    Some(format!("{:3} -\n    + |{}|\n", i, a))
+                },
+                (None, Some(e)) => {
+                    Some(format!("{:3} - |{}|\n    +\n", i, e))
+                },
+                (None, None) => panic!("Cannot get here")
+            }
+        }).collect()
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+enum MatchKind {
+    Exact,
+    Partial,
+    PartialN(usize),
+    NotPresent,
+}
+
+pub fn lines_match(expected: &str, mut actual: &str) -> bool {
+    let expected = substitute_macros(expected);
+    for (i, part) in expected.split("[..]").enumerate() {
+        match actual.find(part) {
+            Some(j) => {
+                if i == 0 && j != 0 {
+                    return false
+                }
+                actual = &actual[j + part.len()..];
+            }
+            None => {
+                return false
+            }
+        }
+    }
+    actual.is_empty() || expected.ends_with("[..]")
+}
+
+#[test]
+fn lines_match_works() {
+    assert!(lines_match("a b", "a b"));
+    assert!(lines_match("a[..]b", "a b"));
+    assert!(lines_match("a[..]", "a b"));
+    assert!(lines_match("[..]", "a b"));
+    assert!(lines_match("[..]b", "a b"));
+
+    assert!(!lines_match("[..]b", "c"));
+    assert!(!lines_match("b", "c"));
+    assert!(!lines_match("b", "cb"));
+}
+
+// Compares JSON object for approximate equality.
+// You can use `[..]` wildcard in strings (useful for OS dependent things such
+// as paths). You can use a `"{...}"` string literal as a wildcard for
+// arbitrary nested JSON (useful for parts of object emitted by other programs
+// (e.g. rustc) rather than Cargo itself). Arrays are sorted before comparison.
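+// For example, an expected object like {"name": "foo[..]", "metadata": "{...}"}
+// accepts any `name` beginning with "foo" and arbitrary JSON under `metadata`.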
+fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value)
+                     -> Option<(&'a Value, &'a Value)> {
+    use serde_json::Value::*;
+    match (expected, actual) {
+        (&Number(ref l), &Number(ref r)) if l == r => None,
+        (&Bool(l), &Bool(r)) if l == r => None,
+        (&String(ref l), &String(ref r)) if lines_match(l, r) => None,
+        (&Array(ref l), &Array(ref r)) => {
+            if l.len() != r.len() {
+                return Some((expected, actual));
+            }
+
+            let mut l = l.iter().collect::<Vec<_>>();
+            let mut r = r.iter().collect::<Vec<_>>();
+
+            l.retain(|l| {
+                match r.iter().position(|r| find_mismatch(l, r).is_none()) {
+                    Some(i) => {
+                        r.remove(i);
+                        false
+                    }
+                    None => true
+                }
+            });
+
+            if l.len() > 0 {
+                assert!(r.len() > 0);
+                Some((&l[0], &r[0]))
+            } else {
+                assert!(r.len() == 0);
+                None
+            }
+        }
+        (&Object(ref l), &Object(ref r)) => {
+            let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
+            if !same_keys {
+                return Some((expected, actual));
+            }
+
+            l.values().zip(r.values())
+             .filter_map(|(l, r)| find_mismatch(l, r))
+             .nth(0)
+        }
+        (&Null, &Null) => None,
+        // magic string literal "{...}" acts as wildcard for any sub-JSON
+        (&String(ref l), _) if l == "{...}" => None,
+        _ => Some((expected, actual)),
+    }
+
+}
+
+struct ZipAll<I1: Iterator, I2: Iterator> {
+    first: I1,
+    second: I2,
+}
+
+impl<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>> Iterator for ZipAll<I1, I2> {
+    type Item = (Option<T>, Option<T>);
+    fn next(&mut self) -> Option<(Option<T>, Option<T>)> {
+        let first = self.first.next();
+        let second = self.second.next();
+
+        match (first, second) {
+            (None, None) => None,
+            (a, b) => Some((a, b))
+        }
+    }
+}
+
+fn zip_all<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
+    ZipAll {
+        first: a,
+        second: b,
+    }
+}
+
+impl fmt::Display for Execs {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "execs")
+    }
+}
+
+impl ham::Matcher<ProcessBuilder> for Execs {
+    fn matches(&self, mut process: ProcessBuilder) -> ham::MatchResult {
+        self.matches(&mut process)
+    }
+}
+
+impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs {
+    fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult {
+        println!("running {}", process);
+        let res = process.exec_with_output();
+
+        match res {
+            Ok(out) => self.match_output(&out),
+            Err(CargoError(CargoErrorKind::ProcessErrorKind(
+                ProcessError { output: Some(ref out), .. }), ..)) => {
+                self.match_output(out)
+            }
+            Err(e) => {
+                let mut s = format!("could not exec process {}: {}", process, e);
+                match e.cause() {
+                    Some(cause) => s.push_str(&format!("\ncaused by: {}",
+                                                       cause.description())),
+                    None => {}
+                }
+                Err(s)
+            }
+        }
+    }
+}
+
+impl ham::Matcher<Output> for Execs {
+    fn matches(&self, output: Output) -> ham::MatchResult {
+        self.match_output(&output)
+    }
+}
+
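+// Constructs an `Execs` with no expectations set; chain `with_*` calls to
+// add them. Typical use in the suites:
+//
+//     assert_that(p.cargo("build").arg("-v"),
+//                 execs().with_status(0)
+//                        .with_stderr_contains("[COMPILING] foo v0.5.0 [..]"));
+//
+// Only the expectations that were explicitly set are checked.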
+pub fn execs() -> Execs {
+    Execs {
+        expect_stdout: None,
+        expect_stderr: None,
+        expect_stdin: None,
+        expect_exit_code: None,
+        expect_stdout_contains: Vec::new(),
+        expect_stderr_contains: Vec::new(),
+        expect_stdout_contains_n: Vec::new(),
+        expect_stdout_not_contains: Vec::new(),
+        expect_stderr_not_contains: Vec::new(),
+        expect_json: None,
+    }
+}
+
+#[derive(Clone)]
+pub struct ShellWrites {
+    expected: String
+}
+
+impl fmt::Display for ShellWrites {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "`{}` written to the shell", self.expected)
+    }
+}
+
+impl<'a> ham::Matcher<&'a [u8]> for ShellWrites {
+    fn matches(&self, actual: &[u8])
+               -> ham::MatchResult
+    {
+        let actual = String::from_utf8_lossy(actual);
+        let actual = actual.to_string();
+        ham::expect(actual == self.expected, actual)
+    }
+}
+
+pub fn shell_writes<T: fmt::Display>(string: T) -> ShellWrites {
+    ShellWrites { expected: string.to_string() }
+}
+
+pub trait Tap {
+    fn tap<F: FnOnce(&mut Self)>(self, callback: F) -> Self;
+}
+
+impl<T> Tap for T {
+    fn tap<F: FnOnce(&mut T)>(mut self, callback: F) -> T {
+        callback(&mut self);
+        self
+    }
+}
+
+pub fn basic_bin_manifest(name: &str) -> String {
+    format!(r#"
+        [package]
+
+        name = "{}"
+        version = "0.5.0"
+        authors = ["wycats@example.com"]
+
+        [[bin]]
+
+        name = "{}"
+    "#, name, name)
+}
+
+pub fn basic_lib_manifest(name: &str) -> String {
+    format!(r#"
+        [package]
+
+        name = "{}"
+        version = "0.5.0"
+        authors = ["wycats@example.com"]
+
+        [lib]
+
+        name = "{}"
+    "#, name, name)
+}
+
+pub fn path2url(p: PathBuf) -> Url {
+    Url::from_file_path(&*p).ok().unwrap()
+}
+
+fn substitute_macros(input: &str) -> String {
+    let macros = [
+        ("[RUNNING]",     "     Running"),
+        ("[COMPILING]",   "   Compiling"),
+        ("[CREATED]",     "     Created"),
+        ("[FINISHED]",    "    Finished"),
+        ("[ERROR]",       "error:"),
+        ("[WARNING]",     "warning:"),
+        ("[DOCUMENTING]", " Documenting"),
+        ("[FRESH]",       "       Fresh"),
+        ("[UPDATING]",    "    Updating"),
+        ("[ADDING]",      "      Adding"),
+        ("[REMOVING]",    "    Removing"),
+        ("[DOCTEST]",     "   Doc-tests"),
+        ("[PACKAGING]",   "   Packaging"),
+        ("[DOWNLOADING]", " Downloading"),
+        ("[UPLOADING]",   "   Uploading"),
+        ("[VERIFYING]",   "   Verifying"),
+        ("[ARCHIVING]",   "   Archiving"),
+        ("[INSTALLING]",  "  Installing"),
+        ("[REPLACING]",   "   Replacing"),
+        ("[UNPACKING]",   "   Unpacking"),
+        ("[EXE]", if cfg!(windows) {".exe"} else {""}),
+        ("[/]", if cfg!(windows) {"\\"} else {"/"}),
+    ];
+    let mut result = input.to_owned();
+    for &(pat, subst) in macros.iter() {
+        result = result.replace(pat, subst)
+    }
+    return result;
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/support/paths.rs b/collector/compile-benchmarks/cargo/tests/cargotest/support/paths.rs
new file mode 100644
index 000000000..8a74fc0f6
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/support/paths.rs
@@ -0,0 +1,161 @@
+use std::env;
+use std::cell::Cell;
+use std::fs;
+use std::io::{self, ErrorKind};
+use std::path::{Path, PathBuf};
+use std::sync::{Once, ONCE_INIT};
+use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
+
+use filetime::{self, FileTime};
+
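+// Every test thread gets its own scratch directory, `target/cit/t<N>`, keyed
+// by a thread-local id; `init()` wipes it once per thread, which keeps the
+// tests hermetic and lets them run in parallel.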
+static CARGO_INTEGRATION_TEST_DIR : &'static str = "cit";
+static NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT;
+
+thread_local!(static TASK_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst));
+
+fn init() {
+    static GLOBAL_INIT: Once = ONCE_INIT;
+    thread_local!(static LOCAL_INIT: Cell<bool> = Cell::new(false));
+    GLOBAL_INIT.call_once(|| {
+        global_root().mkdir_p();
+    });
+    LOCAL_INIT.with(|i| {
+        if i.get() {
+            return
+        }
+        i.set(true);
+        root().rm_rf();
+        home().mkdir_p();
+    })
+}
+
+fn global_root() -> PathBuf {
+    let mut path = t!(env::current_exe());
+    path.pop(); // chop off exe name
+    path.pop(); // chop off 'debug'
+
+    // If `cargo test` is run manually then our path looks like
+    // `target/debug/foo`, in which case our `path` is already pointing at
+    // `target`. If, however, `cargo test --target $target` is used then the
+    // output is `target/$target/debug/foo`, so our path is pointing at
+    // `target/$target`. Here we conditionally pop the `$target` name.
+    if path.file_name().and_then(|s| s.to_str()) != Some("target") {
+        path.pop();
+    }
+
+    path.join(CARGO_INTEGRATION_TEST_DIR)
+}
+
+pub fn root() -> PathBuf {
+    init();
+    global_root().join(&TASK_ID.with(|my_id| format!("t{}", my_id)))
+}
+
+pub fn home() -> PathBuf {
+    root().join("home")
+}
+
+pub trait CargoPathExt {
+    fn rm_rf(&self);
+    fn mkdir_p(&self);
+
+    fn move_into_the_past(&self) {
+        self.move_in_time(|sec, nsec| (sec - 3600, nsec))
+    }
+
+    fn move_into_the_future(&self) {
+        self.move_in_time(|sec, nsec| (sec + 3600, nsec))
+    }
+
+    fn move_in_time<F>(&self, travel_amount: F)
+        where F: Fn(u64, u32) -> (u64, u32);
+}
+
+impl CargoPathExt for Path {
+    /* Technically there is a potential race condition, but we don't
+     * care all that much for our tests
+     */
+    fn rm_rf(&self) {
+        if !self.exists() {
+            return
+        }
+
+        for file in t!(fs::read_dir(self)) {
+            let file = t!(file);
+            if file.file_type().map(|m| m.is_dir()).unwrap_or(false) {
+                file.path().rm_rf();
+            } else {
+                // On windows we can't remove a readonly file, and git will
+                // often clone files as readonly. As a result, we have some
+                // special logic to remove readonly files on windows.
+                do_op(&file.path(), "remove file", |p| fs::remove_file(p));
+            }
+        }
+        do_op(self, "remove dir", |p| fs::remove_dir(p));
+    }
+
+    fn mkdir_p(&self) {
+        fs::create_dir_all(self).unwrap_or_else(|e| {
+            panic!("failed to mkdir_p {}: {}", self.display(), e)
+        })
+    }
+
+    fn move_in_time<F>(&self, travel_amount: F)
+        where F: Fn(u64, u32) -> ((u64, u32)),
+    {
+        if self.is_file() {
+            time_travel(self, &travel_amount);
+        } else {
+            recurse(self, &self.join("target"), &travel_amount);
+        }
+
+        fn recurse<F>(p: &Path, bad: &Path, travel_amount: &F)
+            where F: Fn(u64, u32) -> ((u64, u32)),
+        {
+            if p.is_file() {
+                time_travel(p, travel_amount)
+            } else if !p.starts_with(bad) {
+                for f in t!(fs::read_dir(p)) {
+                    let f = t!(f).path();
+                    recurse(&f, bad, travel_amount);
+                }
+            }
+        }
+
+        fn time_travel<F>(path: &Path, travel_amount: &F)
+            where F: Fn(u64, u32) -> ((u64, u32)),
+        {
+            let stat = t!(path.metadata());
+
+            let mtime = FileTime::from_last_modification_time(&stat);
+
+            let (sec, nsec) = travel_amount(mtime.seconds_relative_to_1970(), mtime.nanoseconds());
+            let newtime = FileTime::from_seconds_since_1970(sec, nsec);
+
+            // Sadly change_file_times has a failure mode where a readonly file
+            // cannot have its times changed on windows.
+            do_op(path, "set file times",
+                  |path| filetime::set_file_times(path, newtime, newtime));
+        }
+    }
+}
+
+fn do_op<F>(path: &Path, desc: &str, mut f: F)
+    where F: FnMut(&Path) -> io::Result<()>
+{
+    match f(path) {
+        Ok(()) => {}
+        Err(ref e) if cfg!(windows) &&
+                      e.kind() == ErrorKind::PermissionDenied => {
+            let mut p = t!(path.metadata()).permissions();
+            p.set_readonly(false);
+            t!(fs::set_permissions(path, p));
+            f(path).unwrap_or_else(|e| {
+                panic!("failed to {} {}: {}", desc, path.display(), e);
+            })
+        }
+        Err(e) => {
+            panic!("failed to {} {}: {}", desc, path.display(), e);
+        }
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/support/publish.rs b/collector/compile-benchmarks/cargo/tests/cargotest/support/publish.rs
new file mode 100644
index 000000000..b82e5d0d0
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/support/publish.rs
@@ -0,0 +1,30 @@
+use std::path::PathBuf;
+use std::io::prelude::*;
+use std::fs::{self, File};
+
+use support::paths;
+use support::git::{repo, Repository};
+
+use url::Url;
+
+pub fn setup() -> Repository {
+    let config = paths::root().join(".cargo/config");
+    t!(fs::create_dir_all(config.parent().unwrap()));
+    t!(t!(File::create(&config)).write_all(br#"
+        [registry]
+        token = "api-token"
+    "#));
+    t!(fs::create_dir_all(&upload_path().join("api/v1/crates")));
+
+    repo(&registry_path())
+        .file("config.json", &format!(r#"{{
+            "dl": "{0}",
+            "api": "{0}"
+        }}"#, upload()))
+        .build()
+}
+
+fn registry_path() -> PathBuf { paths::root().join("registry") }
+pub fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() }
+pub fn upload_path() -> PathBuf { paths::root().join("upload") }
+fn upload() -> Url { Url::from_file_path(&*upload_path()).ok().unwrap() }
diff --git a/collector/compile-benchmarks/cargo/tests/cargotest/support/registry.rs b/collector/compile-benchmarks/cargo/tests/cargotest/support/registry.rs
new file mode 100644
index 000000000..255fc29fc
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cargotest/support/registry.rs
@@ -0,0 +1,278 @@
+use std::collections::HashMap;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{PathBuf, Path};
+
+use flate2::Compression::Default;
+use flate2::write::GzEncoder;
+use git2;
+use hex::ToHex;
+use tar::{Builder, Header};
+use url::Url;
+
+use support::paths;
+use support::git::repo;
+use cargo::util::Sha256;
+
+pub fn registry_path() -> PathBuf { paths::root().join("registry") }
+pub fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() }
+pub fn dl_path() -> PathBuf { paths::root().join("dl") }
+pub fn dl_url() -> Url { Url::from_file_path(&*dl_path()).ok().unwrap() }
+
+pub struct Package {
+    name: String,
+    vers: String,
+    deps: Vec<Dependency>,
+    files: Vec<(String, String)>,
+    extra_files: Vec<(String, String)>,
+    yanked: bool,
+    features: HashMap<String, Vec<String>>,
+    local: bool,
+}
+
+struct Dependency {
+    name: String,
+    vers: String,
+    kind: String,
+    target: Option<String>,
+    features: Vec<String>,
+}
+
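+// `init` writes a `.cargo/config` that replaces the crates.io source with a
+// local file:// registry, so packages can be "published" and fetched in
+// tests without touching the network.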
{{"dl":"{0}","api":"{0}"}} + "#, dl_url())) + .build(); + fs::create_dir_all(dl_path().join("api/v1/crates")).unwrap(); +} + +impl Package { + pub fn new(name: &str, vers: &str) -> Package { + init(); + Package { + name: name.to_string(), + vers: vers.to_string(), + deps: Vec::new(), + files: Vec::new(), + extra_files: Vec::new(), + yanked: false, + features: HashMap::new(), + local: false, + } + } + + pub fn local(&mut self, local: bool) -> &mut Package { + self.local = local; + self + } + + pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { + self.files.push((name.to_string(), contents.to_string())); + self + } + + pub fn extra_file(&mut self, name: &str, contents: &str) -> &mut Package { + self.extra_files.push((name.to_string(), contents.to_string())); + self + } + + pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package { + self.full_dep(name, vers, None, "normal", &[]) + } + + pub fn feature_dep(&mut self, + name: &str, + vers: &str, + features: &[&str]) -> &mut Package { + self.full_dep(name, vers, None, "normal", features) + } + + pub fn target_dep(&mut self, + name: &str, + vers: &str, + target: &str) -> &mut Package { + self.full_dep(name, vers, Some(target), "normal", &[]) + } + + pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package { + self.full_dep(name, vers, None, "dev", &[]) + } + + fn full_dep(&mut self, + name: &str, + vers: &str, + target: Option<&str>, + kind: &str, + features: &[&str]) -> &mut Package { + self.deps.push(Dependency { + name: name.to_string(), + vers: vers.to_string(), + kind: kind.to_string(), + target: target.map(|s| s.to_string()), + features: features.iter().map(|s| s.to_string()).collect(), + }); + self + } + + pub fn yanked(&mut self, yanked: bool) -> &mut Package { + self.yanked = yanked; + self + } + + pub fn publish(&self) -> String { + self.make_archive(); + + // Figure out what we're going to write into the index + let deps = self.deps.iter().map(|dep| { + json!({ + "name": dep.name, + "req": dep.vers, + "features": dep.features, + "default_features": true, + "target": dep.target, + "optional": false, + "kind": dep.kind, + }) + }).collect::>(); + let cksum = { + let mut c = Vec::new(); + t!(t!(File::open(&self.archive_dst())).read_to_end(&mut c)); + cksum(&c) + }; + let line = json!({ + "name": self.name, + "vers": self.vers, + "deps": deps, + "cksum": cksum, + "features": self.features, + "yanked": self.yanked, + }).to_string(); + + let file = match self.name.len() { + 1 => format!("1/{}", self.name), + 2 => format!("2/{}", self.name), + 3 => format!("3/{}/{}", &self.name[..1], self.name), + _ => format!("{}/{}/{}", &self.name[0..2], &self.name[2..4], self.name), + }; + + // Write file/line in the index + let dst = if self.local { + registry_path().join("index").join(&file) + } else { + registry_path().join(&file) + }; + let mut prev = String::new(); + let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev)); + t!(fs::create_dir_all(dst.parent().unwrap())); + t!(t!(File::create(&dst)) + .write_all((prev + &line[..] 
+        let file = match self.name.len() {
+            1 => format!("1/{}", self.name),
+            2 => format!("2/{}", self.name),
+            3 => format!("3/{}/{}", &self.name[..1], self.name),
+            _ => format!("{}/{}/{}", &self.name[0..2], &self.name[2..4], self.name),
+        };
+
+        // Write file/line in the index
+        let dst = if self.local {
+            registry_path().join("index").join(&file)
+        } else {
+            registry_path().join(&file)
+        };
+        let mut prev = String::new();
+        let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev));
+        t!(fs::create_dir_all(dst.parent().unwrap()));
+        t!(t!(File::create(&dst))
+            .write_all((prev + &line[..] + "\n").as_bytes()));
+
+        // Add the new file to the index
+        if !self.local {
+            let repo = t!(git2::Repository::open(&registry_path()));
+            let mut index = t!(repo.index());
+            t!(index.add_path(Path::new(&file)));
+            t!(index.write());
+            let id = t!(index.write_tree());
+
+            // Commit this change
+            let tree = t!(repo.find_tree(id));
+            let sig = t!(repo.signature());
+            let parent = t!(repo.refname_to_id("refs/heads/master"));
+            let parent = t!(repo.find_commit(parent));
+            t!(repo.commit(Some("HEAD"), &sig, &sig,
+                           "Another commit", &tree,
+                           &[&parent]));
+        }
+
+        return cksum
+    }
+
+    fn make_archive(&self) {
+        let mut manifest = format!(r#"
+            [package]
+            name = "{}"
+            version = "{}"
+            authors = []
+        "#, self.name, self.vers);
+        for dep in self.deps.iter() {
+            let target = match dep.target {
+                None => String::new(),
+                Some(ref s) => format!("target.'{}'.", s),
+            };
+            let kind = match &dep.kind[..] {
+                "build" => "build-",
+                "dev" => "dev-",
+                _ => ""
+            };
+            manifest.push_str(&format!(r#"
+                [{}{}dependencies.{}]
+                version = "{}"
+            "#, target, kind, dep.name, dep.vers));
+        }
+
+        let dst = self.archive_dst();
+        t!(fs::create_dir_all(dst.parent().unwrap()));
+        let f = t!(File::create(&dst));
+        let mut a = Builder::new(GzEncoder::new(f, Default));
+        self.append(&mut a, "Cargo.toml", &manifest);
+        if self.files.is_empty() {
+            self.append(&mut a, "src/lib.rs", "");
+        } else {
+            for &(ref name, ref contents) in self.files.iter() {
+                self.append(&mut a, name, contents);
+            }
+        }
+        for &(ref name, ref contents) in self.extra_files.iter() {
+            self.append_extra(&mut a, name, contents);
+        }
+    }
+
+    fn append<W: Write>(&self, ar: &mut Builder<W>, file: &str, contents: &str) {
+        self.append_extra(ar,
+                          &format!("{}-{}/{}", self.name, self.vers, file),
+                          contents);
+    }
+
+    fn append_extra<W: Write>(&self, ar: &mut Builder<W>, path: &str, contents: &str) {
+        let mut header = Header::new_ustar();
+        header.set_size(contents.len() as u64);
+        t!(header.set_path(path));
+        header.set_cksum();
+        t!(ar.append(&header, contents.as_bytes()));
+    }
+
+    pub fn archive_dst(&self) -> PathBuf {
+        if self.local {
+            registry_path().join(format!("{}-{}.crate", self.name,
+                                         self.vers))
+        } else {
+            dl_path().join(&self.name).join(&self.vers).join("download")
+        }
+    }
+}
+
+pub fn cksum(s: &[u8]) -> String {
+    let mut sha = Sha256::new();
+    sha.update(s);
+    sha.finish().to_hex()
+}
diff --git a/collector/compile-benchmarks/cargo/tests/cfg.rs b/collector/compile-benchmarks/cargo/tests/cfg.rs
new file mode 100644
index 000000000..370ae36f7
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/cfg.rs
@@ -0,0 +1,374 @@
+extern crate cargo;
+extern crate cargotest;
+extern crate hamcrest;
+
+use std::str::FromStr;
+use std::fmt;
+
+use cargo::util::{Cfg, CfgExpr};
+use cargotest::rustc_host;
+use cargotest::support::registry::Package;
+use cargotest::support::{project, execs};
+use hamcrest::assert_that;
+
+macro_rules! c {
+    ($a:ident) => (
+        Cfg::Name(stringify!($a).to_string())
+    );
+    ($a:ident = $e:expr) => (
+        Cfg::KeyPair(stringify!($a).to_string(), $e.to_string())
+    );
+}
+
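+// `c!` builds a single `Cfg` atom (`c!(unix)`, `c!(foo = "bar")`); `e!`
+// builds a whole `CfgExpr` tree, letting the parser tests below write
+// expectations like `e!(all(a, b))` directly against the parsed form.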
e { + (any($($t:tt),*)) => (CfgExpr::Any(vec![$(e!($t)),*])); + (all($($t:tt),*)) => (CfgExpr::All(vec![$(e!($t)),*])); + (not($($t:tt)*)) => (CfgExpr::Not(Box::new(e!($($t)*)))); + (($($t:tt)*)) => (e!($($t)*)); + ($($t:tt)*) => (CfgExpr::Value(c!($($t)*))); +} + +fn good(s: &str, expected: T) + where T: FromStr + PartialEq + fmt::Debug, + T::Err: fmt::Display +{ + let c = match T::from_str(s) { + Ok(c) => c, + Err(e) => panic!("failed to parse `{}`: {}", s, e), + }; + assert_eq!(c, expected); +} + +fn bad(s: &str, err: &str) + where T: FromStr + fmt::Display, T::Err: fmt::Display +{ + let e = match T::from_str(s) { + Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg), + Err(e) => e.to_string(), + }; + assert!(e.contains(err), "when parsing `{}`,\n\"{}\" not contained \ + inside: {}", s, err, e); +} + +#[test] +fn cfg_syntax() { + good("foo", c!(foo)); + good("_bar", c!(_bar)); + good(" foo", c!(foo)); + good(" foo ", c!(foo)); + good(" foo = \"bar\"", c!(foo = "bar")); + good("foo=\"\"", c!(foo = "")); + good(" foo=\"3\" ", c!(foo = "3")); + good("foo = \"3 e\"", c!(foo = "3 e")); +} + +#[test] +fn cfg_syntax_bad() { + bad::("", "found nothing"); + bad::(" ", "found nothing"); + bad::("\t", "unexpected character"); + bad::("7", "unexpected character"); + bad::("=", "expected identifier"); + bad::(",", "expected identifier"); + bad::("(", "expected identifier"); + bad::("foo (", "malformed cfg value"); + bad::("bar =", "expected a string"); + bad::("bar = \"", "unterminated string"); + bad::("foo, bar", "malformed cfg value"); +} + +#[test] +fn cfg_expr() { + good("foo", e!(foo)); + good("_bar", e!(_bar)); + good(" foo", e!(foo)); + good(" foo ", e!(foo)); + good(" foo = \"bar\"", e!(foo = "bar")); + good("foo=\"\"", e!(foo = "")); + good(" foo=\"3\" ", e!(foo = "3")); + good("foo = \"3 e\"", e!(foo = "3 e")); + + good("all()", e!(all())); + good("all(a)", e!(all(a))); + good("all(a, b)", e!(all(a, b))); + good("all(a, )", e!(all(a))); + good("not(a = \"b\")", e!(not(a = "b"))); + good("not(all(a))", e!(not(all(a)))); +} + +#[test] +fn cfg_expr_bad() { + bad::(" ", "found nothing"); + bad::(" all", "expected `(`"); + bad::("all(a", "expected `)`"); + bad::("not", "expected `(`"); + bad::("not(a", "expected `)`"); + bad::("a = ", "expected a string"); + bad::("all(not())", "expected identifier"); + bad::("foo(a)", "consider using all() or any() explicitly"); +} + +#[test] +fn cfg_matches() { + assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)])); + assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)])); + assert!(e!(any(foo, bar)).matches(&[c!(bar)])); + assert!(e!(any(foo, bar)).matches(&[c!(foo)])); + assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)])); + assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)])); + assert!(e!(not(foo)).matches(&[c!(bar)])); + assert!(e!(not(foo)).matches(&[])); + assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)])); + assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)])); + + assert!(!e!(foo).matches(&[])); + assert!(!e!(foo).matches(&[c!(bar)])); + assert!(!e!(foo).matches(&[c!(fo)])); + assert!(!e!(any(foo)).matches(&[])); + assert!(!e!(any(foo)).matches(&[c!(bar)])); + assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)])); + assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)])); + assert!(!e!(all(foo, bar)).matches(&[c!(bar)])); + assert!(!e!(all(foo, bar)).matches(&[c!(foo)])); + assert!(!e!(all(foo, bar)).matches(&[])); + assert!(!e!(not(bar)).matches(&[c!(bar)])); + 
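+    // not(bar) must also fail when `bar` appears alongside other cfgs: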
assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)])); + assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)])); +} + +#[test] +fn cfg_easy() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.dependencies] + b = { path = 'b' } + [target."cfg(windows)".dependencies] + b = { path = 'b' } + "#) + .file("src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn dont_include() { + let other_family = if cfg!(unix) {"windows"} else {"unix"}; + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.dependencies] + b = {{ path = 'b' }} + "#, other_family)) + .file("src/lib.rs", "") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn works_through_the_registry() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0") + .target_dep("foo", "0.1.0", "cfg(unix)") + .target_dep("foo", "0.1.0", "cfg(windows)") + .publish(); + + let p = project("a") + .file("Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#) + .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate bar;") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry [..] +[DOWNLOADING] [..] +[DOWNLOADING] [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 +[COMPILING] a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn ignore_version_from_other_platform() { + let this_family = if cfg!(unix) {"unix"} else {"windows"}; + let other_family = if cfg!(unix) {"windows"} else {"unix"}; + Package::new("foo", "0.1.0").publish(); + Package::new("foo", "0.2.0").publish(); + + let p = project("a") + .file("Cargo.toml", &format!(r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.dependencies] + foo = "0.1.0" + + [target.'cfg({})'.dependencies] + foo = "0.2.0" + "#, this_family, other_family)) + .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate foo;") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry [..] +[DOWNLOADING] [..] +[COMPILING] foo v0.1.0 +[COMPILING] a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn bad_target_spec() { + let p = project("a") + .file("Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(4)'.dependencies] + bar = "0.1.0" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + failed to parse `4` as a cfg expression + +Caused by: + unexpected character in cfg `4`, [..] 
+")); +} + +#[test] +fn bad_target_spec2() { + let p = project("a") + .file("Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(foo =)'.dependencies] + bar = "0.1.0" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + failed to parse `foo =` as a cfg expression + +Caused by: + expected a string, found nothing +")); +} + +#[test] +fn multiple_match_ok() { + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.dependencies] + b = {{ path = 'b' }} + [target.'cfg(target_family = "unix")'.dependencies] + b = {{ path = 'b' }} + [target."cfg(windows)".dependencies] + b = {{ path = 'b' }} + [target.'cfg(target_family = "windows")'.dependencies] + b = {{ path = 'b' }} + [target."cfg(any(windows, unix))".dependencies] + b = {{ path = 'b' }} + + [target.{}.dependencies] + b = {{ path = 'b' }} + "#, rustc_host())) + .file("src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn any_ok() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target."cfg(any(windows, unix))".dependencies] + b = { path = 'b' } + "#) + .file("src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/check-style.sh b/collector/compile-benchmarks/cargo/tests/check-style.sh new file mode 100755 index 000000000..72d7ac65b --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/check-style.sh @@ -0,0 +1,3 @@ +echo "checking for lines over 100 characters..." 
+find src tests -name '*.rs' | xargs grep '.\{101,\}' && exit 1 +echo "ok" diff --git a/collector/compile-benchmarks/cargo/tests/check.rs b/collector/compile-benchmarks/cargo/tests/check.rs new file mode 100644 index 000000000..37f2ea828 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/check.rs @@ -0,0 +1,461 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::is_nightly; +use cargotest::support::{execs, project}; +use cargotest::support::registry::Package; +use hamcrest::assert_that; + +#[test] +fn check_success() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + ::bar::baz(); + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("check"), + execs().with_status(0)); +} + +#[test] +fn check_fail() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + ::bar::baz(42); + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("check"), + execs().with_status(101)); +} + +#[test] +fn custom_derive() { + if !is_nightly() { + return + } + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" +#![feature(proc_macro)] + +#[macro_use] +extern crate bar; + +trait B { + fn b(&self); +} + +#[derive(B)] +struct A; + +fn main() { + let a = A; + a.b(); +} +"#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + [lib] + proc-macro = true + "#) + .file("src/lib.rs", r#" +#![feature(proc_macro, proc_macro_lib)] +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +#[proc_macro_derive(B)] +pub fn derive(_input: TokenStream) -> TokenStream { + format!("impl B for A {{ fn b(&self) {{}} }}").parse().unwrap() +} +"#) + .build(); + + assert_that(foo.cargo("check"), + execs().with_status(0)); +} + +#[test] +fn check_build() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + ::bar::baz(); + } + "#) + .build(); + + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("check"), + execs().with_status(0)); + assert_that(foo.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn build_check() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + ::bar::baz(); + } + "#) + .build(); + + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = 
"bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("build"), + execs().with_status(0)); + assert_that(foo.cargo("check"), + execs().with_status(0)); +} + +// Checks that where a project has both a lib and a bin, the lib is only checked +// not built. +#[test] +fn issue_3418() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + "#) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(foo.cargo("check").arg("-v"), + execs().with_status(0) + .with_stderr_contains("[..] --emit=dep-info,metadata [..]")); +} + +// Some weirdness that seems to be caused by a crate being built as well as +// checked, but in this case with a proc macro too. +#[test] +fn issue_3419() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + rustc-serialize = "*" + "#) + .file("src/lib.rs", r#" + extern crate rustc_serialize; + + use rustc_serialize::Decodable; + + pub fn take() {} + "#) + .file("src/main.rs", r#" + extern crate rustc_serialize; + + extern crate foo; + + #[derive(RustcDecodable)] + pub struct Foo; + + fn main() { + foo::take::(); + } + "#) + .build(); + + Package::new("rustc-serialize", "1.0.0") + .file("src/lib.rs", + r#"pub trait Decodable: Sized { + fn decode(d: &mut D) -> Result; + } + pub trait Decoder { + type Error; + fn read_struct(&mut self, s_name: &str, len: usize, f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + } "#).publish(); + + assert_that(p.cargo("check"), + execs().with_status(0)); +} + +// test `cargo rustc --profile check` +#[test] +fn rustc_check() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + ::bar::baz(); + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("rustc") + .arg("--profile") + .arg("check") + .arg("--") + .arg("--emit=metadata"), + execs().with_status(0)); +} + +#[test] +fn rustc_check_err() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + ::bar::qux(); + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("rustc") + .arg("--profile") + .arg("check") + .arg("--") + .arg("--emit=metadata"), + execs().with_status(101)); +} + +#[test] +fn check_all() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [workspace] + [dependencies] + b = { path = "b" } + "#) + .file("src/main.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("tests/a.rs", "") + .file("src/lib.rs", "") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#) + .file("b/src/main.rs", "fn main() {}") + .file("b/src/lib.rs", "") + .build(); + + 
assert_that(p.cargo("check").arg("--all").arg("-v"), + execs().with_status(0) + .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]") + .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]") + .with_stderr_contains("[..] --crate-name b b[/]src[/]lib.rs [..]") + .with_stderr_contains("[..] --crate-name b b[/]src[/]main.rs [..]") + ); +} + +#[test] +fn check_virtual_all_implied() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + assert_that(p.cargo("check").arg("-v"), + execs().with_status(0) + .with_stderr_contains("[..] --crate-name foo foo[/]src[/]lib.rs [..]") + .with_stderr_contains("[..] --crate-name bar bar[/]src[/]lib.rs [..]") + ); +} + +#[test] +fn check_all_targets() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", "pub fn smth() {}") + .file("examples/example1.rs", "fn main() {}") + .file("tests/test2.rs", "#[test] fn t() {}") + .file("benches/bench3.rs", "") + .build(); + + assert_that(foo.cargo("check").arg("--all-targets").arg("-v"), + execs().with_status(0) + .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]") + .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]") + .with_stderr_contains("[..] --crate-name example1 examples[/]example1.rs [..]") + .with_stderr_contains("[..] --crate-name test2 tests[/]test2.rs [..]") + .with_stderr_contains("[..] 
--crate-name bench3 benches[/]bench3.rs [..]") + ); +} diff --git a/collector/compile-benchmarks/cargo/tests/clean.rs b/collector/compile-benchmarks/cargo/tests/clean.rs new file mode 100644 index 000000000..0cf552371 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/clean.rs @@ -0,0 +1,239 @@ +extern crate hamcrest; +extern crate cargotest; + +use std::env; + +use cargotest::support::{git, project, execs, main_file, basic_bin_manifest}; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_dir, existing_file, is_not}; + +#[test] +fn cargo_clean_simple() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.build_dir(), existing_dir()); + + assert_that(p.cargo("clean"), + execs().with_status(0)); + assert_that(&p.build_dir(), is_not(existing_dir())); +} + +#[test] +fn different_dir() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("src/bar/a.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.build_dir(), existing_dir()); + + assert_that(p.cargo("clean").cwd(&p.root().join("src")), + execs().with_status(0).with_stdout("")); + assert_that(&p.build_dir(), is_not(existing_dir())); +} + +#[test] +fn clean_multiple_packages() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [[bin]] + name = "foo" + "#) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d1" + "#) + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d2" + "#) + .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") + .build(); + + assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("d2") + .arg("-p").arg("foo"), + execs().with_status(0)); + + let d1_path = &p.build_dir().join("debug") + .join(format!("d1{}", env::consts::EXE_SUFFIX)); + let d2_path = &p.build_dir().join("debug") + .join(format!("d2{}", env::consts::EXE_SUFFIX)); + + + assert_that(&p.bin("foo"), existing_file()); + assert_that(d1_path, existing_file()); + assert_that(d2_path, existing_file()); + + assert_that(p.cargo("clean").arg("-p").arg("d1").arg("-p").arg("d2") + .cwd(&p.root().join("src")), + execs().with_status(0).with_stdout("")); + assert_that(&p.bin("foo"), existing_file()); + assert_that(d1_path, is_not(existing_file())); + assert_that(d2_path, is_not(existing_file())); +} + +#[test] +fn clean_release() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--release"), + execs().with_status(0)); + + assert_that(p.cargo("clean").arg("-p").arg("foo"), + execs().with_status(0)); + assert_that(p.cargo("build").arg("--release"), + execs().with_status(0).with_stdout("")); + + 
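+    // A release-profile clean must remove the optimized artifacts, so the
+    // release build below has to recompile foo from scratch.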
assert_that(p.cargo("clean").arg("-p").arg("foo").arg("--release"), + execs().with_status(0)); + assert_that(p.cargo("build").arg("--release"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] +")); +} + +#[test] +fn build_script() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("src/main.rs", "fn main() {}") + .file("build.rs", r#" + use std::path::PathBuf; + use std::env; + + fn main() { + let out = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + if env::var("FIRST").is_ok() { + std::fs::File::create(out.join("out")).unwrap(); + } else { + assert!(!std::fs::metadata(out.join("out")).is_ok()); + } + } + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").env("FIRST", "1"), + execs().with_status(0)); + assert_that(p.cargo("clean").arg("-p").arg("foo"), + execs().with_status(0)); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..]build-script-build` +[RUNNING] `rustc [..] src[/]main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn clean_git() { + let git = git::new("dep", |project| { + project.file("Cargo.toml", r#" + [project] + name = "dep" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + dep = {{ git = '{}' }} + "#, git.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(p.cargo("clean").arg("-p").arg("dep"), + execs().with_status(0).with_stdout("")); + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn registry() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(p.cargo("clean").arg("-p").arg("bar"), + execs().with_status(0).with_stdout("")); + assert_that(p.cargo("build"), + execs().with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/concurrent.rs b/collector/compile-benchmarks/cargo/tests/concurrent.rs new file mode 100644 index 000000000..27ad4b27e --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/concurrent.rs @@ -0,0 +1,500 @@ +extern crate cargotest; +extern crate git2; +extern crate hamcrest; + +use std::{env, str}; +use std::fs::{self, File}; +use std::io::Write; +use std::net::TcpListener; +use std::process::Stdio; +use std::thread; +use std::sync::mpsc::channel; +use std::time::Duration; + +use cargotest::install::{has_installed_exe, cargo_home}; +use cargotest::support::git; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::{assert_that, existing_file}; + +fn pkg(name: &str, vers: &str) { + Package::new(name, vers) + .file("src/main.rs", "fn main() {{}}") + .publish(); +} + +#[test] +fn multiple_installs() { + let p = project("foo") + .file("a/Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#) + .file("a/src/main.rs", "fn main() {}") + .file("b/Cargo.toml", 
r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + "#) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("install").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("install").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); + + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("bar")); +} + +#[test] +fn concurrent_installs() { + const LOCKED_BUILD: &'static str = "waiting for file lock on build directory"; + + pkg("foo", "0.0.1"); + pkg("bar", "0.0.1"); + + let mut a = cargotest::cargo_process().arg("install").arg("foo").build_command(); + let mut b = cargotest::cargo_process().arg("install").arg("bar").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert!(!str::from_utf8(&a.stderr).unwrap().contains(LOCKED_BUILD)); + assert!(!str::from_utf8(&b.stderr).unwrap().contains(LOCKED_BUILD)); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); + + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("bar")); +} + +#[test] +fn one_install_should_be_bad() { + let p = project("foo") + .file("a/Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#) + .file("a/src/main.rs", "fn main() {}") + .file("b/Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("install").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("install").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + let (bad, good) = if a.status.code() == Some(101) {(a, b)} else {(b, a)}; + assert_that(bad, execs().with_status(101).with_stderr_contains("\ +[ERROR] binary `foo[..]` already exists in destination as part of `[..]` +")); + assert_that(good, execs().with_status(0).with_stderr_contains("\ +warning: be sure to add `[..]` to your PATH [..] 
+")); + + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn multiple_registry_fetches() { + let mut pkg = Package::new("bar", "1.0.2"); + for i in 0..10 { + let name = format!("foo{}", i); + Package::new(&name, "1.0.0").publish(); + pkg.dep(&name, "*"); + } + pkg.publish(); + + let p = project("foo") + .file("a/Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + bar = "*" + "#) + .file("a/src/main.rs", "fn main() {}") + .file("b/Cargo.toml", r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + bar = "*" + "#) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("build").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("build").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); + + let suffix = env::consts::EXE_SUFFIX; + assert_that(&p.root().join("a/target/debug").join(format!("foo{}", suffix)), + existing_file()); + assert_that(&p.root().join("b/target/debug").join(format!("bar{}", suffix)), + existing_file()); +} + +#[test] +fn git_same_repo_different_tags() { + let a = git::new("dep", |project| { + project.file("Cargo.toml", r#" + [project] + name = "dep" + version = "0.5.0" + authors = [] + "#).file("src/lib.rs", "pub fn tag1() {}") + }).unwrap(); + + let repo = git2::Repository::open(&a.root()).unwrap(); + git::tag(&repo, "tag1"); + + File::create(a.root().join("src/lib.rs")).unwrap() + .write_all(b"pub fn tag2() {}").unwrap(); + git::add(&repo); + git::commit(&repo); + git::tag(&repo, "tag2"); + + let p = project("foo") + .file("a/Cargo.toml", &format!(r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}', tag = 'tag1' }} + "#, a.url())) + .file("a/src/main.rs", "extern crate dep; fn main() { dep::tag1(); }") + .file("b/Cargo.toml", &format!(r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}', tag = 'tag2' }} + "#, a.url())) + .file("b/src/main.rs", "extern crate dep; fn main() { dep::tag2(); }"); + let p = p.build(); + + let mut a = p.cargo("build").arg("-v").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("build").arg("-v").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); +} + +#[test] +fn git_same_branch_different_revs() { + let a = git::new("dep", |project| { + project.file("Cargo.toml", r#" + [project] + name = "dep" + version = "0.5.0" + authors = [] + "#).file("src/lib.rs", "pub fn f1() {}") + }).unwrap(); + + let p = project("foo") + .file("a/Cargo.toml", &format!(r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}' }} + "#, a.url())) + .file("a/src/main.rs", "extern crate dep; fn main() { dep::f1(); }") + 
.file("b/Cargo.toml", &format!(r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}' }} + "#, a.url())) + .file("b/src/main.rs", "extern crate dep; fn main() { dep::f2(); }"); + let p = p.build(); + + // Generate a Cargo.lock pointing at the current rev, then clear out the + // target directory + assert_that(p.cargo("build").cwd(p.root().join("a")), + execs().with_status(0)); + fs::remove_dir_all(p.root().join("a/target")).unwrap(); + + // Make a new commit on the master branch + let repo = git2::Repository::open(&a.root()).unwrap(); + File::create(a.root().join("src/lib.rs")).unwrap() + .write_all(b"pub fn f2() {}").unwrap(); + git::add(&repo); + git::commit(&repo); + + // Now run both builds in parallel. The build of `b` should pick up the + // newest commit while the build of `a` should use the locked old commit. + let mut a = p.cargo("build").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("build").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); +} + +#[test] +fn same_project() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#) + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", ""); + let p = p.build(); + + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); +} + +// Make sure that if Cargo dies while holding a lock that it's released and the +// next Cargo to come in will take over cleanly. +// older win versions don't support job objects, so skip test there +#[test] +#[cfg_attr(target_os = "windows", ignore)] +fn killing_cargo_releases_the_lock() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + build = "build.rs" + "#) + .file("src/main.rs", "fn main() {}") + .file("build.rs", r#" + use std::net::TcpStream; + + fn main() { + if std::env::var("A").is_ok() { + TcpStream::connect(&std::env::var("ADDR").unwrap()[..]) + .unwrap(); + std::thread::sleep(std::time::Duration::new(10, 0)); + } + } + "#); + let p = p.build(); + + // Our build script will connect to our local TCP socket to inform us that + // it's started and that's how we know that `a` will have the lock + // when we kill it. + let l = TcpListener::bind("127.0.0.1:0").unwrap(); + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build").build_command(); + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + a.env("ADDR", l.local_addr().unwrap().to_string()).env("A", "a"); + b.env("ADDR", l.local_addr().unwrap().to_string()).env_remove("A"); + + // Spawn `a`, wait for it to get to the build script (at which point the + // lock is held), then kill it. 
+ let mut a = a.spawn().unwrap(); + l.accept().unwrap(); + a.kill().unwrap(); + + // Spawn `b`, then just finish the output of a/b the same way the above + // tests does. + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + // We killed `a`, so it shouldn't succeed, but `b` should have succeeded. + assert!(!a.status.success()); + assert_that(b, execs().with_status(0)); +} + +#[test] +fn debug_release_ok() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + fs::remove_dir_all(p.root().join("target")).unwrap(); + + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build").arg("--release").build_command(); + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.0 [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(b, execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.0 [..] +[FINISHED] release [optimized] target(s) in [..] +")); +} + +#[test] +fn no_deadlock_with_git_dependencies() { + let dep1 = git::new("dep1", |project| { + project.file("Cargo.toml", r#" + [project] + name = "dep1" + version = "0.5.0" + authors = [] + "#).file("src/lib.rs", "") + }).unwrap(); + + let dep2 = git::new("dep2", |project| { + project.file("Cargo.toml", r#" + [project] + name = "dep2" + version = "0.5.0" + authors = [] + "#).file("src/lib.rs", "") + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep1 = {{ git = '{}' }} + dep2 = {{ git = '{}' }} + "#, dep1.url(), dep2.url())) + .file("src/main.rs", "fn main() { }"); + let p = p.build(); + + let n_concurrent_builds = 5; + + let (tx, rx) = channel(); + for _ in 0..n_concurrent_builds { + let cmd = p.cargo("build").build_command() + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn(); + let tx = tx.clone(); + thread::spawn(move || { + let result = cmd.unwrap().wait_with_output().unwrap(); + tx.send(result).unwrap() + }); + } + + //TODO: use `Receiver::recv_timeout` once it is stable. 
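+    // Poll with try_recv for up to ~30s (3000 iterations x 10ms) so a
+    // deadlocked build fails the test instead of hanging it forever.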
+ let recv_timeout = |chan: &::std::sync::mpsc::Receiver<_>| { + for _ in 0..3000 { + if let Ok(x) = chan.try_recv() { + return x + } + thread::sleep(Duration::from_millis(10)); + } + chan.try_recv().expect("Deadlock!") + }; + + for _ in 0..n_concurrent_builds { + let result = recv_timeout(&rx); + assert_that(result, execs().with_status(0)) + } + +} diff --git a/collector/compile-benchmarks/cargo/tests/config.rs b/collector/compile-benchmarks/cargo/tests/config.rs new file mode 100644 index 000000000..89226ce12 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/config.rs @@ -0,0 +1,28 @@ +extern crate hamcrest; +extern crate cargotest; + +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn read_env_vars_for_config() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + use std::env; + fn main() { + assert_eq!(env::var("NUM_JOBS").unwrap(), "100"); + } + "#) + .build(); + + assert_that(p.cargo("build").env("CARGO_BUILD_JOBS", "100"), + execs().with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/cross-compile.rs b/collector/compile-benchmarks/cargo/tests/cross-compile.rs new file mode 100644 index 000000000..4c7837fc9 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/cross-compile.rs @@ -0,0 +1,1007 @@ +extern crate cargo; +extern crate cargotest; +extern crate hamcrest; + +use cargo::util::process; +use cargotest::{is_nightly, rustc_host}; +use cargotest::support::{project, execs, basic_bin_manifest, cross_compile}; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn simple_cross() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", &format!(r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, cross_compile::alternate())) + .file("src/main.rs", &format!(r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, cross_compile::alternate_arch())) + .build(); + + let target = cross_compile::alternate(); + assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0)); + assert_that(&p.target_bin(&target, "foo"), existing_file()); + + assert_that(process(&p.target_bin(&target, "foo")), + execs().with_status(0)); +} + +#[test] +fn simple_cross_config() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file(".cargo/config", &format!(r#" + [build] + target = "{}" + "#, cross_compile::alternate())) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", &format!(r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, cross_compile::alternate())) + .file("src/main.rs", &format!(r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, cross_compile::alternate_arch())) + .build(); + + let target = cross_compile::alternate(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + assert_that(&p.target_bin(&target, "foo"), existing_file()); + + assert_that(process(&p.target_bin(&target, "foo")), + execs().with_status(0)); +} + +#[test] +fn simple_deps() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = 
"foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { bar::bar(); } + "#) + .build(); + let _p2 = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let target = cross_compile::alternate(); + assert_that(p.cargo("build").arg("--target").arg(&target), + execs().with_status(0)); + assert_that(&p.target_bin(&target, "foo"), existing_file()); + + assert_that(process(&p.target_bin(&target, "foo")), + execs().with_status(0)); +} + +#[test] +fn plugin_deps() { + if cross_compile::disabled() { return } + if !is_nightly() { return } + + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#) + .file("src/main.rs", r#" + #![feature(plugin)] + #![plugin(bar)] + extern crate baz; + fn main() { + assert_eq!(bar!(), baz::baz()); + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + "#) + .file("src/lib.rs", r#" + #![feature(plugin_registrar, quote, rustc_private)] + + extern crate rustc_plugin; + extern crate syntax; + + use rustc_plugin::Registry; + use syntax::tokenstream::TokenTree; + use syntax::codemap::Span; + use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; + + #[plugin_registrar] + pub fn foo(reg: &mut Registry) { + reg.register_macro("bar", expand_bar); + } + + fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) + -> Box { + MacEager::expr(quote_expr!(cx, 1)) + } + "#) + .build(); + let _baz = project("baz") + .file("Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + let target = cross_compile::alternate(); + assert_that(foo.cargo("build").arg("--target").arg(&target), + execs().with_status(0)); + assert_that(&foo.target_bin(&target, "foo"), existing_file()); + + assert_that(process(&foo.target_bin(&target, "foo")), + execs().with_status(0)); +} + +#[test] +fn plugin_to_the_max() { + if cross_compile::disabled() { return } + if !is_nightly() { return } + + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#) + .file("src/main.rs", r#" + #![feature(plugin)] + #![plugin(bar)] + extern crate baz; + fn main() { + assert_eq!(bar!(), baz::baz()); + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + + [dependencies.baz] + path = "../baz" + "#) + .file("src/lib.rs", r#" + #![feature(plugin_registrar, quote, rustc_private)] + + extern crate rustc_plugin; + extern crate syntax; + extern crate baz; + + use rustc_plugin::Registry; + use syntax::tokenstream::TokenTree; + use syntax::codemap::Span; + use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; + + #[plugin_registrar] + pub fn foo(reg: &mut Registry) { + reg.register_macro("bar", expand_bar); + } + + fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) + -> Box { + MacEager::expr(quote_expr!(cx, baz::baz())) + } + "#) + .build(); + let _baz = project("baz") + .file("Cargo.toml", r#" 
+ [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + let target = cross_compile::alternate(); + assert_that(foo.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0)); + println!("second"); + assert_that(foo.cargo("build").arg("-v") + .arg("--target").arg(&target), + execs().with_status(0)); + assert_that(&foo.target_bin(&target, "foo"), existing_file()); + + assert_that(process(&foo.target_bin(&target, "foo")), + execs().with_status(0)); +} + +#[test] +fn linker_and_ar() { + if cross_compile::disabled() { return } + + let target = cross_compile::alternate(); + let p = project("foo") + .file(".cargo/config", &format!(r#" + [target.{}] + ar = "my-ar-tool" + linker = "my-linker-tool" + "#, target)) + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &format!(r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, cross_compile::alternate_arch())) + .build(); + + assert_that(p.cargo("build").arg("--target").arg(&target) + .arg("-v"), + execs().with_status(101) + .with_stderr_contains(&format!("\ +[COMPILING] foo v0.5.0 ({url}) +[RUNNING] `rustc --crate-name foo src[/]foo.rs --crate-type bin \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]{target}[/]debug[/]deps \ + --target {target} \ + -C ar=my-ar-tool -C linker=my-linker-tool \ + -L dependency={dir}[/]target[/]{target}[/]debug[/]deps \ + -L dependency={dir}[/]target[/]debug[/]deps` +", + dir = p.root().display(), + url = p.url(), + target = target, + ))); +} + +#[test] +fn plugin_with_extra_dylib_dep() { + if cross_compile::disabled() { return } + if !is_nightly() { return } + + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + #![feature(plugin)] + #![plugin(bar)] + + fn main() {} + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + + [dependencies.baz] + path = "../baz" + "#) + .file("src/lib.rs", r#" + #![feature(plugin_registrar, rustc_private)] + + extern crate rustc_plugin; + extern crate baz; + + use rustc_plugin::Registry; + + #[plugin_registrar] + pub fn foo(reg: &mut Registry) { + println!("{}", baz::baz()); + } + "#) + .build(); + let _baz = project("baz") + .file("Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + + [lib] + name = "baz" + crate_type = ["dylib"] + "#) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + let target = cross_compile::alternate(); + assert_that(foo.cargo("build").arg("--target").arg(&target), + execs().with_status(0)); +} + +#[test] +fn cross_tests() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "bar" + "#) + .file("src/bin/bar.rs", &format!(r#" + #[allow(unused_extern_crates)] + extern crate foo; + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + #[test] fn test() {{ main() }} + "#, cross_compile::alternate_arch())) + .file("src/lib.rs", &format!(r#" + use std::env; + pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }} + #[test] fn test_foo() {{ foo() }} + "#, cross_compile::alternate_arch())) + .build(); + + let target = 
cross_compile::alternate(); + assert_that(p.cargo("test").arg("--target").arg(&target), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]{triple}[/]debug[/]deps[/]bar-[..][EXE]", foo = p.url(), triple = target)) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test ... ok")); +} + +#[test] +fn no_cross_doctests() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + authors = [] + version = "0.0.0" + "#) + .file("src/lib.rs", r#" + //! ``` + //! extern crate foo; + //! assert!(true); + //! ``` + "#) + .build(); + + let host_output = format!("\ +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo +", foo = p.url()); + + println!("a"); + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&host_output)); + + println!("b"); + let target = cross_compile::host(); + assert_that(p.cargo("test").arg("--target").arg(&target), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo +", foo = p.url(), triple = target))); + + println!("c"); + let target = cross_compile::alternate(); + assert_that(p.cargo("test").arg("--target").arg(&target), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE] +", foo = p.url(), triple = target))); +} + +#[test] +fn simple_cargo_run() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/main.rs", &format!(r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, cross_compile::alternate_arch())) + .build(); + + let target = cross_compile::alternate(); + assert_that(p.cargo("run").arg("--target").arg(&target), + execs().with_status(0)); +} + +#[test] +fn cross_with_a_build_script() { + if cross_compile::disabled() { return } + + let target = cross_compile::alternate(); + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#) + .file("build.rs", &format!(r#" + use std::env; + use std::path::PathBuf; + fn main() {{ + assert_eq!(env::var("TARGET").unwrap(), "{0}"); + let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out"); + path.pop(); + assert!(path.file_name().unwrap().to_str().unwrap() + .starts_with("foo-")); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target"); + }} + "#, target)) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.0 (file://[..]) +[RUNNING] 
`rustc [..] build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]foo-[..]` +[RUNNING] `{dir}[/]target[/]debug[/]build[/]foo-[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]main.rs [..] --target {target} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", target = target, + dir = p.root().display()))); +} + +#[test] +fn build_script_needed_for_host_and_target() { + if cross_compile::disabled() { return } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + + [dependencies.d1] + path = "d1" + [build-dependencies.d2] + path = "d2" + "#) + + .file("build.rs", r#" + #[allow(unused_extern_crates)] + extern crate d2; + fn main() { d2::d2(); } + "#) + .file("src/main.rs", " + #[allow(unused_extern_crates)] + extern crate d1; + fn main() { d1::d1(); } + ") + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#) + .file("d1/src/lib.rs", " + pub fn d1() {} + ") + .file("d1/build.rs", r#" + use std::env; + fn main() { + let target = env::var("TARGET").unwrap(); + println!("cargo:rustc-flags=-L /path/to/{}", target); + } + "#) + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + + [dependencies.d1] + path = "../d1" + "#) + .file("d2/src/lib.rs", " + #[allow(unused_extern_crates)] + extern crate d1; + pub fn d2() { d1::d1(); } + ") + .build(); + + assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0) + .with_stderr_contains(&format!("\ +[COMPILING] d1 v0.0.0 ({url}/d1)", url = p.url())) + .with_stderr_contains(&format!("\ +[RUNNING] `rustc [..] d1[/]build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]d1-[..]`", + dir = p.root().display())) + .with_stderr_contains(&format!("\ +[RUNNING] `{dir}[/]target[/]debug[/]build[/]d1-[..][/]build-script-build`", + dir = p.root().display())) + .with_stderr_contains("\ +[RUNNING] `rustc [..] d1[/]src[/]lib.rs [..]`") + .with_stderr_contains(&format!("\ +[COMPILING] d2 v0.0.0 ({url}/d2)", url = p.url())) + .with_stderr_contains(&format!("\ +[RUNNING] `rustc [..] d2[/]src[/]lib.rs [..] \ + -L /path/to/{host}`", host = host)) + .with_stderr_contains(&format!("\ +[COMPILING] foo v0.0.0 ({url})", url = p.url())) + .with_stderr_contains(&format!("\ +[RUNNING] `rustc [..] build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]foo-[..] \ + -L /path/to/{host}`", dir = p.root().display(), host = host)) + .with_stderr_contains(&format!("\ +[RUNNING] `rustc [..] src[/]main.rs [..] --target {target} [..] 
\ + -L /path/to/{target}`", target = target))); +} + +#[test] +fn build_deps_for_the_right_arch() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.d2] + path = "d2" + "#) + .file("src/main.rs", "extern crate d2; fn main() {}") + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + "#) + .file("d1/src/lib.rs", " + pub fn d1() {} + ") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "../d1" + "#) + .file("d2/build.rs", "extern crate d1; fn main() {}") + .file("d2/src/lib.rs", "") + .build(); + + let target = cross_compile::alternate(); + assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0)); +} + +#[test] +fn build_script_only_host() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "d1" + "#) + .file("src/main.rs", "fn main() {}") + .file("build.rs", "extern crate d1; fn main() {}") + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + build = "build.rs" + "#) + .file("d1/src/lib.rs", " + pub fn d1() {} + ") + .file("d1/build.rs", r#" + use std::env; + + fn main() { + assert!(env::var("OUT_DIR").unwrap().replace("\\", "/") + .contains("target/debug/build/d1-"), + "bad: {:?}", env::var("OUT_DIR")); + } + "#) + .build(); + + let target = cross_compile::alternate(); + assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0)); +} + +#[test] +fn plugin_build_script_right_arch() { + if cross_compile::disabled() { return } + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [lib] + name = "foo" + plugin = true + "#) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v").arg("--target").arg(cross_compile::alternate()), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn build_script_with_platform_specific_dependencies() { + if cross_compile::disabled() { return } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "d1" + "#) + .file("build.rs", " + #[allow(unused_extern_crates)] + extern crate d1; + fn main() {} + ") + .file("src/lib.rs", "") + .file("d1/Cargo.toml", &format!(r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + + [target.{}.dependencies] + d2 = {{ path = "../d2" }} + "#, host)) + .file("d1/src/lib.rs", " + #[allow(unused_extern_crates)] + extern crate d2; + ") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + "#) + .file("d2/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v").arg("--target").arg(&target), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] d2 v0.0.0 ([..]) +[RUNNING] `rustc [..] 
d2[/]src[/]lib.rs [..]` +[COMPILING] d1 v0.0.0 ([..]) +[RUNNING] `rustc [..] d1[/]src[/]lib.rs [..]` +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `{dir}[/]target[/]debug[/]build[/]foo-[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]lib.rs [..] --target {target} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.root().display(), target = target))); +} + +#[test] +fn platform_specific_dependencies_do_not_leak() { + if cross_compile::disabled() { return } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [dependencies.d1] + path = "d1" + + [build-dependencies.d1] + path = "d1" + "#) + .file("build.rs", "extern crate d1; fn main() {}") + .file("src/lib.rs", "") + .file("d1/Cargo.toml", &format!(r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + + [target.{}.dependencies] + d2 = {{ path = "../d2" }} + "#, host)) + .file("d1/src/lib.rs", "extern crate d2;") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + "#) + .file("d2/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v").arg("--target").arg(&target), + execs().with_status(101) + .with_stderr_contains("\ +[..] can't find crate for `d2`[..]")); +} + +#[test] +fn platform_specific_variables_reflected_in_build_scripts() { + if cross_compile::disabled() { return } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [target.{host}.dependencies] + d1 = {{ path = "d1" }} + + [target.{target}.dependencies] + d2 = {{ path = "d2" }} + "#, host = host, target = target)) + .file("build.rs", &format!(r#" + use std::env; + + fn main() {{ + let platform = env::var("TARGET").unwrap(); + let (expected, not_expected) = match &platform[..] 
{{ + "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"), + "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"), + _ => panic!("unknown platform") + }}; + + env::var(expected).ok() + .expect(&format!("missing {{}}", expected)); + env::var(not_expected).err() + .expect(&format!("found {{}}", not_expected)); + }} + "#, host = host, target = target)) + .file("src/lib.rs", "") + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + links = "d1" + build = "build.rs" + "#) + .file("d1/build.rs", r#" + fn main() { println!("cargo:val=1") } + "#) + .file("d1/src/lib.rs", "") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + links = "d2" + build = "build.rs" + "#) + .file("d2/build.rs", r#" + fn main() { println!("cargo:val=1") } + "#) + .file("d2/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + assert_that(p.cargo("build").arg("-v").arg("--target").arg(&target), + execs().with_status(0)); +} + +#[test] +fn cross_test_dylib() { + if cross_compile::disabled() { return } + + let target = cross_compile::alternate(); + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", r#" + extern crate bar as the_bar; + + pub fn bar() { the_bar::baz(); } + + #[test] + fn foo() { bar(); } + "#) + .file("tests/test.rs", r#" + extern crate foo as the_foo; + + #[test] + fn foo() { the_foo::bar(); } + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#) + .file("bar/src/lib.rs", &format!(r#" + use std::env; + pub fn baz() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, cross_compile::alternate_arch())) + .build(); + + assert_that(p.cargo("test").arg("--target").arg(&target), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]{arch}[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]{arch}[/]debug[/]deps[/]test-[..][EXE]", + dir = p.url(), arch = cross_compile::alternate())) + .with_stdout_contains_n("test foo ... 
ok", 2)); + +} diff --git a/collector/compile-benchmarks/cargo/tests/cross-publish.rs b/collector/compile-benchmarks/cargo/tests/cross-publish.rs new file mode 100644 index 000000000..4dd0c292a --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/cross-publish.rs @@ -0,0 +1,100 @@ +extern crate cargo; +extern crate cargotest; +extern crate hamcrest; +extern crate flate2; +extern crate tar; + +use std::fs::File; +use std::path::PathBuf; +use std::io::prelude::*; + +use cargotest::support::{project, execs, cross_compile, publish}; +use hamcrest::{assert_that, contains}; +use flate2::read::GzDecoder; +use tar::Archive; + +#[test] +fn simple_cross_package() { + if cross_compile::disabled() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#) + .file("src/main.rs", &format!(r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, cross_compile::alternate_arch())) + .build(); + + let target = cross_compile::alternate(); + + assert_that(p.cargo("package").arg("--target").arg(&target), + execs().with_status(0).with_status(0).with_stderr(&format!( +" Packaging foo v0.0.0 ({dir}) + Verifying foo v0.0.0 ({dir}) + Compiling foo v0.0.0 ({dir}/target/package/foo-0.0.0) + Finished dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); + + // Check that the tarball contains the files + let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap(); + let mut rdr = GzDecoder::new(f).unwrap(); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let entries = ar.entries().unwrap(); + let entry_paths = entries.map(|entry| { + entry.unwrap().path().unwrap().into_owned() + }).collect::>(); + assert_that(&entry_paths, contains(vec![PathBuf::from("foo-0.0.0/Cargo.toml")])); + assert_that(&entry_paths, contains(vec![PathBuf::from("foo-0.0.0/Cargo.toml.orig")])); + assert_that(&entry_paths, contains(vec![PathBuf::from("foo-0.0.0/src/main.rs")])); +} + +#[test] +fn publish_with_target() { + if cross_compile::disabled() { return } + + publish::setup(); + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#) + .file("src/main.rs", &format!(r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, cross_compile::alternate_arch())) + .build(); + + let target = cross_compile::alternate(); + + assert_that(p.cargo("publish") + .arg("--index").arg(publish::registry().to_string()) + .arg("--target").arg(&target), + execs().with_status(0).with_stderr(&format!( +" Updating registry `{registry}` + Packaging foo v0.0.0 ({dir}) + Verifying foo v0.0.0 ({dir}) + Compiling foo v0.0.0 ({dir}/target/package/foo-0.0.0) + Finished dev [unoptimized + debuginfo] target(s) in [..] 
+ Uploading foo v0.0.0 ({dir})
+", dir = p.url(), registry = publish::registry())));
+} diff --git a/collector/compile-benchmarks/cargo/tests/death.rs b/collector/compile-benchmarks/cargo/tests/death.rs new file mode 100644 index 000000000..3d04a24cc --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/death.rs @@ -0,0 +1,137 @@ +extern crate cargotest;
+extern crate libc;
+
+use std::fs;
+use std::io::{self, Read};
+use std::net::TcpListener;
+use std::process::{Stdio, Child};
+use std::thread;
+use std::time::Duration;
+
+use cargotest::support::project;
+
+#[cfg(unix)]
+fn enabled() -> bool {
+    true
+}
+
+// On Windows support for these tests is only enabled through the usage of job
+// objects. Support for nested job objects, however, was added in recent-ish
+// versions of Windows, so this test may not always be able to succeed.
+//
+// As a result, we try to add ourselves to a job object here to see whether
+// these tests can succeed or not.
+#[cfg(windows)]
+fn enabled() -> bool {
+    extern crate kernel32;
+    extern crate winapi;
+    unsafe {
+        // If we're not currently in a job, then we can definitely run these
+        // tests.
+        let me = kernel32::GetCurrentProcess();
+        let mut ret = 0;
+        let r = kernel32::IsProcessInJob(me, 0 as *mut _, &mut ret);
+        assert!(r != 0);
+        if ret == winapi::FALSE {
+            return true
+        }
+
+        // If we are in a job, then we can run these tests if we can be added to
+        // a nested job (as we're going to create a nested job no matter what as
+        // part of these tests).
+        //
+        // If we can't be added to a nested job, then these tests will
+        // definitely fail, and there's not much we can do about that.
+        let job = kernel32::CreateJobObjectW(0 as *mut _, 0 as *const _);
+        assert!(!job.is_null());
+        let r = kernel32::AssignProcessToJobObject(job, me);
+        kernel32::CloseHandle(job);
+        r != 0
+    }
+}
+
+#[test]
+fn ctrl_c_kills_everyone() {
+    if !enabled() {
+        return
+    }
+
+    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = listener.local_addr().unwrap();
+
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#)
+        .file("src/lib.rs", "")
+        .file("build.rs", &format!(r#"
+            use std::net::TcpStream;
+            use std::io::Read;
+
+            fn main() {{
+                let mut socket = TcpStream::connect("{}").unwrap();
+                let _ = socket.read(&mut [0; 10]);
+                panic!("that read should never return");
+            }}
+        "#, addr))
+        .build();
+
+    let mut cargo = p.cargo("build").build_command();
+    cargo.stdin(Stdio::piped())
+         .stdout(Stdio::piped())
+         .stderr(Stdio::piped())
+         .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1");
+    let mut child = cargo.spawn().unwrap();
+
+    let mut sock = listener.accept().unwrap().0;
+    ctrl_c(&mut child);
+
+    assert!(!child.wait().unwrap().success());
+    match sock.read(&mut [0; 10]) {
+        Ok(n) => assert_eq!(n, 0),
+        Err(e) => assert_eq!(e.kind(), io::ErrorKind::ConnectionReset),
+    }
+
+    // Ok so what we just did was spawn cargo that spawned a build script, then
+    // we killed cargo in hopes of it killing the build script as well. If all
+    // went well the build script is now dead. On Windows, however, this is
+    // enforced with job objects which means that it may actually be in the
+    // *process* of being torn down at this point.
+    //
+    // Now on Windows we can't completely remove a file until all handles to it
+    // have been closed. Including those that represent running processes. So if
+    // we were to return here then there may still be an open reference to some
+    // file in the build directory. What we actually want to do is wait for the
+    // build script to *completely* exit. Take care of that by blowing away the
+    // build directory here, and panicking if we eventually spin too long
+    // without being able to.
+    for i in 0..10 {
+        match fs::remove_dir_all(&p.root().join("target")) {
+            Ok(()) => return,
+            Err(e) => println!("attempt {}: {}", i, e),
+        }
+        thread::sleep(Duration::from_millis(100));
+    }
+
+    panic!("couldn't remove build directory after a few tries, seems like \
+            we won't be able to!");
+}
+
+#[cfg(unix)]
+fn ctrl_c(child: &mut Child) {
+    use libc;
+
+    let r = unsafe { libc::kill(-(child.id() as i32), libc::SIGINT) };
+    if r < 0 {
+        panic!("failed to kill: {}", io::Error::last_os_error());
+    }
+}
+
+#[cfg(windows)]
+fn ctrl_c(child: &mut Child) {
+    child.kill().unwrap();
+} diff --git a/collector/compile-benchmarks/cargo/tests/dep-info.rs b/collector/compile-benchmarks/cargo/tests/dep-info.rs new file mode 100644 index 000000000..f2f1f82e0 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/dep-info.rs @@ -0,0 +1,84 @@ +extern crate cargotest;
+extern crate hamcrest;
+
+use cargotest::support::{basic_bin_manifest, main_file, execs, project};
+use hamcrest::{assert_that, existing_file};
+
+#[test]
+fn build_dep_info() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+
+    let depinfo_bin_path = &p.bin("foo").with_extension("d");
+
+    assert_that(depinfo_bin_path, existing_file());
+}
+
+#[test]
+fn build_dep_info_lib() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["lib"]
+        "#)
+        .file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
+    assert_that(&p.example_lib("ex", "lib").with_extension("d"), existing_file());
+}
+
+
+#[test]
+fn build_dep_info_rlib() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["rlib"]
+        "#)
+        .file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
+    assert_that(&p.example_lib("ex", "rlib").with_extension("d"), existing_file());
}
+
+#[test]
+fn build_dep_info_dylib() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["dylib"]
+        "#)
+        .file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
+    assert_that(&p.example_lib("ex", "dylib").with_extension("d"), existing_file());
+} diff --git a/collector/compile-benchmarks/cargo/tests/directory.rs b/collector/compile-benchmarks/cargo/tests/directory.rs new file mode 100644 index 000000000..9888bc6f3 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/directory.rs @@ -0,0 +1,633 @@ +#[macro_use]
+extern crate cargotest;
+extern crate hamcrest;
+#[macro_use]
+extern crate serde_derive;
+extern crate serde_json;
+
+use std::collections::HashMap;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::str;
+
+use cargotest::cargo_process;
+use cargotest::support::git;
+use cargotest::support::paths;
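+
+// The tests in this file exercise "directory sources": a source-replacement
+// mechanism where a registry such as crates.io is redirected to a plain
+// directory of vendored packages. `setup()` below writes a `.cargo/config`
+// that replaces `crates-io` with the local `index` directory, and
+// `VendorPackage` lays out packages there together with the
+// `.cargo-checksum.json` file that directory sources consult to verify file
+// contents.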
+use cargotest::support::registry::{Package, cksum};
+use cargotest::support::{project, execs, ProjectBuilder};
+use hamcrest::assert_that;
+
+fn setup() {
+    let root = paths::root();
+    t!(fs::create_dir(&root.join(".cargo")));
+    t!(t!(File::create(root.join(".cargo/config"))).write_all(br#"
+        [source.crates-io]
+        replace-with = 'my-awesome-local-registry'
+
+        [source.my-awesome-local-registry]
+        directory = 'index'
+    "#));
+}
+
+struct VendorPackage {
+    p: Option<ProjectBuilder>,
+    cksum: Checksum,
+}
+
+#[derive(Serialize)]
+struct Checksum {
+    package: Option<String>,
+    files: HashMap<String, String>,
+}
+
+impl VendorPackage {
+    fn new(name: &str) -> VendorPackage {
+        VendorPackage {
+            p: Some(project(&format!("index/{}", name))),
+            cksum: Checksum {
+                package: Some(String::new()),
+                files: HashMap::new(),
+            },
+        }
+    }
+
+    fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage {
+        self.p = Some(self.p.take().unwrap().file(name, contents));
+        self.cksum.files.insert(name.to_string(), cksum(contents.as_bytes()));
+        self
+    }
+
+    fn disable_checksum(&mut self) -> &mut VendorPackage {
+        self.cksum.package = None;
+        self
+    }
+
+    fn build(&mut self) {
+        let p = self.p.take().unwrap();
+        let json = serde_json::to_string(&self.cksum).unwrap();
+        let p = p.file(".cargo-checksum.json", &json);
+        let _ = p.build();
+    }
+}
+
+#[test]
+fn simple() {
+    setup();
+
+    VendorPackage::new("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    let p = project("bar")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+        "#)
+        .file("src/lib.rs", r#"
+            extern crate foo;
+
+            pub fn bar() {
+                foo::foo();
+            }
+        "#)
+        .build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(0).with_stderr("\
+[COMPILING] foo v0.1.0
+[COMPILING] bar v0.1.0 ([..]bar)
+[FINISHED] [..]
+"));
+}
+
+#[test]
+fn simple_install() {
+    setup();
+
+    VendorPackage::new("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    VendorPackage::new("bar")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+        "#)
+        .file("src/main.rs", r#"
+            extern crate foo;
+
+            pub fn main() {
+                foo::foo();
+            }
+        "#)
+        .build();
+
+    assert_that(cargo_process().arg("install").arg("bar"),
+                execs().with_status(0).with_stderr(
+" Installing bar v0.1.0
+ Compiling foo v0.1.0
+ Compiling bar v0.1.0
+ Finished release [optimized] target(s) in [..] secs
+ Installing [..]bar[..]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +")); +} + +#[test] +fn simple_install_fail() { + setup(); + + VendorPackage::new("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + VendorPackage::new("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + baz = "9.8.7" + "#) + .file("src/main.rs", r#" + extern crate foo; + + pub fn main() { + foo::foo(); + } + "#) + .build(); + + assert_that(cargo_process().arg("install").arg("bar"), + execs().with_status(101).with_stderr( +" Installing bar v0.1.0 +error: failed to compile `bar v0.1.0`, intermediate artifacts can be found at `[..]` + +Caused by: + no matching package named `baz` found (required by `bar`) +location searched: registry https://github.com/rust-lang/crates.io-index +version required: ^9.8.7 +")); +} + +#[test] +fn install_without_feature_dep() { + setup(); + + VendorPackage::new("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + VendorPackage::new("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + baz = { version = "9.8.7", optional = true } + + [features] + wantbaz = ["baz"] + "#) + .file("src/main.rs", r#" + extern crate foo; + + pub fn main() { + foo::foo(); + } + "#) + .build(); + + assert_that(cargo_process().arg("install").arg("bar"), + execs().with_status(0).with_stderr( +" Installing bar v0.1.0 + Compiling foo v0.1.0 + Compiling bar v0.1.0 + Finished release [optimized] target(s) in [..] secs + Installing [..]bar[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +")); +} + +#[test] +fn not_there() { + setup(); + + let _ = project("index").build(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: no matching package named `foo` found (required by `bar`) +location searched: [..] +version required: ^0.1.0 +")); +} + +#[test] +fn multiple() { + setup(); + + VendorPackage::new("foo-0.1.0") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .file(".cargo-checksum", "") + .build(); + + VendorPackage::new("foo-0.2.0") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .file(".cargo-checksum", "") + .build(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] 
+")); +} + +#[test] +fn crates_io_then_directory() { + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#) + .build(); + + let cksum = Package::new("foo", "0.1.0") + .file("src/lib.rs", "pub fn foo() -> u32 { 0 }") + .publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] +")); + + setup(); + + let mut v = VendorPackage::new("foo"); + v.file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#); + v.file("src/lib.rs", "pub fn foo() -> u32 { 1 }"); + v.cksum.package = Some(cksum); + v.build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] +")); +} + +#[test] +fn crates_io_then_bad_checksum() { + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .build(); + + Package::new("foo", "0.1.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + setup(); + + VendorPackage::new("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: checksum for `foo v0.1.0` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.1.0` is the same as when the lockfile was generated + +")); +} + +#[test] +fn bad_file_checksum() { + setup(); + + VendorPackage::new("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + let mut f = t!(File::create(paths::root().join("index/foo/src/lib.rs"))); + t!(f.write_all(b"fn foo() -> u32 { 0 }")); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: the listed checksum of `[..]lib.rs` has changed: +expected: [..] +actual: [..] 
+ +directory sources are not intended to be edited, if modifications are \ +required then it is recommended that [replace] is used with a forked copy of \ +the source +")); +} + +#[test] +fn only_dot_files_ok() { + setup(); + + VendorPackage::new("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + VendorPackage::new("bar") + .file(".foo", "") + .build(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn git_lock_file_doesnt_change() { + + let git = git::new("git", |p| { + p.file("Cargo.toml", r#" + [project] + name = "git" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + VendorPackage::new("git") + .file("Cargo.toml", r#" + [package] + name = "git" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + .disable_checksum() + .build(); + + let p = project("bar") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git = {{ git = '{0}' }} + "#, git.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let mut lock1 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock1)); + + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all(&format!(r#" + [source.my-git-repo] + git = '{}' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + directory = 'index' + "#, git.url()).as_bytes())); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] +")); + + let mut lock2 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock2)); + assert!(lock1 == lock2, "lock files changed"); +} + +#[test] +fn git_override_requires_lockfile() { + VendorPackage::new("git") + .file("Cargo.toml", r#" + [package] + name = "git" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + .disable_checksum() + .build(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git = { git = 'https://example.com/' } + "#) + .file("src/lib.rs", "") + .build(); + + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all(br#" + [source.my-git-repo] + git = 'https://example.com/' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + directory = 'index' + "#)); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to load source for a dependency on `git` + +Caused by: + Unable to update [..] 
+ +Caused by: + the source my-git-repo requires a lock file to be present first before it can be +used against vendored source code + +remove the source replacement configuration, generate a lock file, and then +restore the source replacement configuration to continue the build + +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/doc.rs b/collector/compile-benchmarks/cargo/tests/doc.rs new file mode 100644 index 000000000..0d215c91a --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/doc.rs @@ -0,0 +1,903 @@ +extern crate cargotest; +extern crate hamcrest; +extern crate cargo; + +use std::str; +use std::fs; + +use cargotest::rustc_host; +use cargotest::support::{project, execs, path2url}; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_file, existing_dir, is_not}; +use cargo::util::{CargoError, CargoErrorKind}; + +#[test] +fn simple() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", r#" + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0).with_stderr(&format!("\ +[..] foo v0.0.1 ({dir}) +[..] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root())))); + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); +} + +#[test] +fn doc_no_libs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "foo" + doc = false + "#) + .file("src/main.rs", r#" + bad code + "#) + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0)); +} + +#[test] +fn doc_twice() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0).with_stderr(&format!("\ +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root())))); + + assert_that(p.cargo("doc"), + execs().with_status(0).with_stdout("")) +} + +#[test] +fn doc_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", r#" + extern crate bar; + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0).with_stderr(&format!("\ +[..] bar v0.0.1 ({dir}/bar) +[..] bar v0.0.1 ({dir}/bar) +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = path2url(p.root())))); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); + + assert_that(p.cargo("doc") + .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"), + execs().with_status(0).with_stdout("")); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); +} + +#[test] +fn doc_no_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", r#" + extern crate bar; + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + assert_that(p.cargo("doc").arg("--no-deps"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root())))); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/bar/index.html"), is_not(existing_file())); +} + +#[test] +fn doc_only_bin() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + assert_that(p.cargo("doc").arg("-v"), + execs().with_status(0)); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); +} + +#[test] +fn doc_multiple_targets_same_name_lib() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + [lib] + name = "foo_lib" + "#) + .file("foo/src/lib.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + [lib] + name = "foo_lib" + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("doc").arg("--all"), + execs() + .with_status(101) + .with_stderr_contains("[..] library `foo_lib` is specified [..]") + .with_stderr_contains("[..] `foo v0.1.0[..]` [..]") + .with_stderr_contains("[..] `bar v0.1.0[..]` [..]")); +} + +#[test] +fn doc_multiple_targets_same_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo_lib" + "#) + .file("foo/src/foo_lib.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + [lib] + name = "foo_lib" + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("doc").arg("--all"), + execs() + .with_status(101) + .with_stderr_contains("[..] target `foo_lib` [..]") + .with_stderr_contains("[..] 
binary by package `foo v0.1.0[..]`[..]") + .with_stderr_contains("[..] library by package `bar v0.1.0[..]` [..]")); +} + +#[test] +fn doc_multiple_targets_same_name_bin() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#) + .file("foo/src/foo-cli.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#) + .file("bar/src/foo-cli.rs", "") + .build(); + + assert_that(p.cargo("doc").arg("--all"), + execs() + .with_status(101) + .with_stderr_contains("[..] binary `foo_cli` is specified [..]") + .with_stderr_contains("[..] `foo v0.1.0[..]` [..]") + .with_stderr_contains("[..] `bar v0.1.0[..]` [..]")); +} + +#[test] +fn doc_multiple_targets_same_name_undoced() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#) + .file("foo/src/foo-cli.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + [[bin]] + name = "foo-cli" + doc = false + "#) + .file("bar/src/foo-cli.rs", "") + .build(); + + assert_that(p.cargo("doc").arg("--all"), + execs().with_status(0)); +} + +#[test] +fn doc_lib_bin_same_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", "fn foo() {}") + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(101) + .with_stderr("\ +[ERROR] The target `foo` is specified as a library and as a binary by package \ +`foo [..]`. It can be documented[..]")); +} + +#[test] +fn doc_dash_p() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "extern crate a;") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies.b] + path = "../b" + "#) + .file("a/src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("doc").arg("-p").arg("a"), + execs().with_status(0) + .with_stderr("\ +[..] b v0.0.1 (file://[..]) +[..] b v0.0.1 (file://[..]) +[DOCUMENTING] a v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn doc_same_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/main.rs", "fn main() {}") + .file("examples/main.rs", "fn main() {}") + .file("tests/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0)); +} + +#[test] +fn doc_target() { + const TARGET: &'static str = "arm-unknown-linux-gnueabihf"; + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + #![feature(no_core)] + #![no_core] + + extern { + pub static A: u32; + } + "#) + .build(); + + assert_that(p.cargo("doc").arg("--target").arg(TARGET).arg("--verbose"), + execs().with_status(0)); + assert_that(&p.root().join(&format!("target/{}/doc", TARGET)), existing_dir()); + assert_that(&p.root().join(&format!("target/{}/doc/foo/index.html", TARGET)), existing_file()); +} + +#[test] +fn target_specific_not_documented() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "not rust") + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0)); +} + +#[test] +fn output_not_captured() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", " + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + ") + .build(); + + let error = p.cargo("doc").exec_with_output().err().unwrap(); + if let CargoError(CargoErrorKind::ProcessErrorKind(perr), ..) 
= error { + let output = perr.output.unwrap(); + let stderr = str::from_utf8(&output.stderr).unwrap(); + + assert!(stderr.contains("☃"), "no snowman\n{}", stderr); + assert!(stderr.contains("unknown start of token"), "no message{}", stderr); + } else { + assert!(false, "an error kind other than ProcessErrorKind was encountered"); + } +} + +#[test] +fn target_specific_documented() { + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + a = {{ path = "a" }} + [target.{}.dependencies] + a = {{ path = "a" }} + "#, rustc_host())) + .file("src/lib.rs", " + extern crate a; + + /// test + pub fn foo() {} + ") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", " + /// test + pub fn foo() {} + ") + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0)); +} + +#[test] +fn no_document_build_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [build-dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", " + pub fn foo() {} + ") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", " + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + ") + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0)); +} + +#[test] +fn doc_release() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--release"), + execs().with_status(0)); + assert_that(p.cargo("doc").arg("--release").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[DOCUMENTING] foo v0.0.1 ([..]) +[RUNNING] `rustdoc [..] src[/]lib.rs [..]` +[FINISHED] release [optimized] target(s) in [..] 
+")); +} + +#[test] +fn doc_multiple_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + + [dependencies.baz] + path = "baz" + "#) + .file("src/lib.rs", r#" + extern crate bar; + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .file("baz/Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("baz/src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(p.cargo("doc") + .arg("-p").arg("bar") + .arg("-p").arg("baz") + .arg("-v"), + execs().with_status(0)); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/baz/index.html"), existing_file()); +} + +#[test] +fn features() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + + [features] + foo = ["bar/bar"] + "#) + .file("src/lib.rs", r#" + #[cfg(feature = "foo")] + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + bar = [] + "#) + .file("bar/build.rs", r#" + fn main() { + println!("cargo:rustc-cfg=bar"); + } + "#) + .file("bar/src/lib.rs", r#" + #[cfg(feature = "bar")] + pub fn bar() {} + "#) + .build(); + assert_that(p.cargo("doc").arg("--features").arg("foo"), + execs().with_status(0)); + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file()); + assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file()); +} + +#[test] +fn rerun_when_dir_removed() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + /// dox + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("doc"), + execs().with_status(0)); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); + + fs::remove_dir_all(p.root().join("target/doc/foo")).unwrap(); + + assert_that(p.cargo("doc"), + execs().with_status(0)); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); +} + +#[test] +fn document_only_lib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + /// dox + pub fn foo() {} + "#) + .file("src/bin/bar.rs", r#" + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + fn main() { foo(); } + "#) + .build(); + assert_that(p.cargo("doc").arg("--lib"), + execs().with_status(0)); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); +} + +#[test] +fn plugins_no_use_target() { + if !cargotest::is_nightly() { + return + } + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("doc") + .arg("--target=x86_64-unknown-openbsd") + .arg("-v"), + execs().with_status(0)); +} + +#[test] +fn doc_all_workspace() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#) + .file("src/main.rs", r#" + fn 
main() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + // The order in which bar is compiled or documented is not deterministic + assert_that(p.cargo("doc") + .arg("--all"), + execs().with_status(0) + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")); +} + +#[test] +fn doc_all_virtual_manifest() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + // The order in which foo and bar are documented is not guaranteed + assert_that(p.cargo("doc") + .arg("--all"), + execs().with_status(0) + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")); +} + +#[test] +fn doc_virtual_manifest_all_implied() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["foo", "bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + // The order in which foo and bar are documented is not guaranteed + assert_that(p.cargo("doc"), + execs().with_status(0) + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")); +} + +#[test] +fn doc_all_member_dependency_same_name() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["a"] + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + a = "0.1.0" + "#) + .file("a/src/lib.rs", r#" + pub fn a() {} + "#) + .build(); + + Package::new("a", "0.1.0").publish(); + + assert_that(p.cargo("doc") + .arg("--all"), + execs().with_status(0) + .with_stderr_contains("[..] Updating registry `[..]`") + .with_stderr_contains("[..] 
Documenting a v0.1.0 ([..])")); +} diff --git a/collector/compile-benchmarks/cargo/tests/features.rs b/collector/compile-benchmarks/cargo/tests/features.rs new file mode 100644 index 000000000..0eae4380b --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/features.rs @@ -0,0 +1,1238 @@ +#[macro_use] +extern crate cargotest; +extern crate hamcrest; + +use std::fs::File; +use std::io::prelude::*; + +use cargotest::support::paths::CargoPathExt; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn invalid1() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + bar = ["baz"] + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `bar` includes `baz` which is neither a dependency nor another feature +")); +} + +#[test] +fn invalid2() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + bar = ["baz"] + + [dependencies.bar] + path = "foo" + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Features and dependencies cannot have the same name: `bar` +")); +} + +#[test] +fn invalid3() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + bar = ["baz"] + + [dependencies.baz] + path = "foo" + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `bar` depends on `baz` which is not an optional dependency. 
+Consider adding `optional = true` to the dependency +")); +} + +#[test] +fn invalid4() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + features = ["bar"] + "#) + .file("src/main.rs", "") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] Package `bar v0.0.1 ([..])` does not have these features: `bar` +")); + + p.change_file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#); + + assert_that(p.cargo("build").arg("--features").arg("test"), + execs().with_status(101).with_stderr("\ +[ERROR] Package `foo v0.0.1 ([..])` does not have these features: `test` +")); +} + +#[test] +fn invalid5() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies.bar] + path = "bar" + optional = true + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dev-dependencies are not allowed to be optional: `bar` +")); +} + +#[test] +fn invalid6() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("foo"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `foo` requires a feature of `bar` which is not a dependency +")); +} + +#[test] +fn invalid7() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + bar = [] + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("foo"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `foo` requires a feature of `bar` which is not a dependency +")); +} + +#[test] +fn invalid8() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + features = ["foo/bar"] + "#) + .file("src/main.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("foo"), + execs().with_status(101).with_stderr("\ +[ERROR] feature names may not contain slashes: `foo/bar` +")); +} + +#[test] +fn invalid9() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("bar"), + execs().with_status(0).with_stderr("\ +warning: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with \ +that name, but only optional dependencies can be used as features. [..] 
+ Compiling bar v0.0.1 ([..]) + Compiling foo v0.0.1 ([..]) + Finished dev [unoptimized + debuginfo] target(s) in [..] secs +")); +} + +#[test] +fn invalid10() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + features = ["baz"] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.baz] + path = "baz" + "#) + .file("bar/src/lib.rs", "") + .file("bar/baz/Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("bar/baz/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +warning: Package `bar v0.0.1 ([..])` does not have feature `baz`. It has a required dependency with \ +that name, but only optional dependencies can be used as features. [..] + Compiling baz v0.0.1 ([..]) + Compiling bar v0.0.1 ([..]) + Compiling foo v0.0.1 ([..]) + Finished dev [unoptimized + debuginfo] target(s) in [..] secs +")); +} + +#[test] +fn no_transitive_dep_feature_requirement() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.derived] + path = "derived" + + [features] + default = ["derived/bar/qux"] + "#) + .file("src/main.rs", r#" + extern crate derived; + fn main() { derived::test(); } + "#) + .file("derived/Cargo.toml", r#" + [package] + name = "derived" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("derived/src/lib.rs", r#" + extern crate bar; + pub use bar::test; + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + qux = [] + "#) + .file("bar/src/lib.rs", r#" + #[cfg(feature = "qux")] + pub fn test() { print!("test"); } + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] feature names may not contain slashes: `bar/qux` +")); +} + +#[test] +fn no_feature_doesnt_build() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + "#) + .file("src/main.rs", r#" + #[cfg(feature = "bar")] + extern crate bar; + #[cfg(feature = "bar")] + fn main() { bar::bar(); println!("bar") } + #[cfg(not(feature = "bar"))] + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); + assert_that(p.process(&p.bin("foo")), + execs().with_status(0).with_stdout("")); + + assert_that(p.cargo("build").arg("--features").arg("bar"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", dir = p.url()))); + assert_that(p.process(&p.bin("foo")), + execs().with_status(0).with_stdout("bar\n")); +} + +#[test] +fn default_feature_pulled_in() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["bar"] + + [dependencies.bar] + path = "bar" + optional = true + "#) + .file("src/main.rs", r#" + #[cfg(feature = "bar")] + extern crate bar; + #[cfg(feature = "bar")] + fn main() { bar::bar(); println!("bar") } + #[cfg(not(feature = "bar"))] + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); + assert_that(p.process(&p.bin("foo")), + execs().with_status(0).with_stdout("bar\n")); + + assert_that(p.cargo("build").arg("--no-default-features"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); + assert_that(p.process(&p.bin("foo")), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn cyclic_feature() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["default"] + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] Cyclic feature dependency: feature `default` depends on itself +")); +} + +#[test] +fn cyclic_feature2() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar"] + bar = ["foo"] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn groups_on_groups_on_groups() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["f1"] + f1 = ["f2", "bar"] + f2 = ["f3", "f4"] + f3 = ["f5", "f6", "baz"] + f4 = ["f5", "f7"] + f5 = ["f6"] + f6 = ["f7"] + f7 = ["bar"] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#) + .file("src/main.rs", r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", dir = p.url()))); +} + +#[test] +fn many_cli_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#) + .file("src/main.rs", r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("bar baz"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); +} + +#[test] +fn union_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + features = ["f1"] + [dependencies.d2] + path = "d2" + features = ["f2"] + "#) + .file("src/main.rs", r#" + #[allow(unused_extern_crates)] + extern crate d1; + extern crate d2; + fn main() { + d2::f1(); + d2::f2(); + } + "#) + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [features] + f1 = ["d2"] + + [dependencies.d2] + path = "../d2" + features = ["f1"] + optional = true + "#) + .file("d1/src/lib.rs", "") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [features] + f1 = [] + f2 = [] + "#) + .file("d2/src/lib.rs", r#" + #[cfg(feature = "f1")] pub fn f1() {} + #[cfg(feature = "f2")] pub fn f2() {} + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] d2 v0.0.1 ({dir}/d2) +[COMPILING] d1 v0.0.1 ({dir}/d1) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); +} + +#[test] +fn many_features_no_rebuilds() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies.a] + path = "a" + features = ["fall"] + "#) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + ftest = [] + ftest2 = [] + fall = ["ftest", "ftest2"] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] a v0.1.0 ({dir}/a) +[COMPILING] b v0.1.0 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); + p.root().move_into_the_past(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[FRESH] a v0.1.0 ([..]/a) +[FRESH] b v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +// Tests that all cmd lines work with `--features ""` +#[test] +fn empty_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--features").arg(""), + execs().with_status(0)); +} + +// Tests that all cmd lines work with `--features ""` +#[test] +fn transitive_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", " + extern crate bar; + fn main() { bar::baz(); } + ") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + baz = [] + "#) + .file("bar/src/lib.rs", r#" + #[cfg(feature = "baz")] + pub fn baz() {} + "#) + .build(); + + assert_that(p.cargo("build").arg("--features").arg("foo"), + execs().with_status(0)); +} + +#[test] +fn everything_in_the_lockfile() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + f1 = ["d1/f1"] + f2 = ["d2"] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + optional = true + [dependencies.d3] + path = "d3" + optional = true + "#) + .file("src/main.rs", "fn main() {}") + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [features] + f1 = [] + "#) + .file("d1/src/lib.rs", "") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.2" + authors = [] + "#) + .file("d2/src/lib.rs", "") + .file("d3/Cargo.toml", r#" + [package] + name = "d3" + version = "0.0.3" + authors = [] + + [features] + f3 = [] + "#) + .file("d3/src/lib.rs", "") + .build(); + + assert_that(p.cargo("fetch"), execs().with_status(0)); + let loc = p.root().join("Cargo.lock"); + let mut lockfile = String::new(); + t!(t!(File::open(&loc)).read_to_string(&mut lockfile)); + assert!(lockfile.contains(r#"name = "d1""#), "d1 not found\n{}", lockfile); + assert!(lockfile.contains(r#"name = "d2""#), "d2 not found\n{}", lockfile); + assert!(lockfile.contains(r#"name = "d3""#), "d3 not found\n{}", lockfile); +} + +#[test] +fn no_rebuild_when_frobbing_default_feature() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#) + .file("src/lib.rs", "") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "../a", features = ["f1"], default-features = false } + "#) + .file("b/src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + default = ["f1"] + f1 = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn unions_work_with_no_default_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#) + .file("src/lib.rs", r#" + extern crate a; + pub fn foo() { a::a(); } + "#) + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + 
+ [dependencies] + a = { path = "../a", features = [], default-features = false } + "#) + .file("b/src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + default = ["f1"] + f1 = [] + "#) + .file("a/src/lib.rs", r#" + #[cfg(feature = "f1")] + pub fn a() {} + "#) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn optional_and_dev_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", optional = true } + [dev-dependencies] + foo = { path = "foo" } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] test v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn activating_feature_activates_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", optional = true } + + [features] + a = ["foo/a"] + "#) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::bar(); + } + ") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + a = [] + "#) + .file("foo/src/lib.rs", r#" + #[cfg(feature = "a")] + pub fn bar() {} + "#) + .build(); + + assert_that(p.cargo("build").arg("--features").arg("a").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn dep_feature_in_cmd_line() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.derived] + path = "derived" + "#) + .file("src/main.rs", r#" + extern crate derived; + fn main() { derived::test(); } + "#) + .file("derived/Cargo.toml", r#" + [package] + name = "derived" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [features] + default = [] + derived-feat = ["bar/some-feat"] + "#) + .file("derived/src/lib.rs", r#" + extern crate bar; + pub use bar::test; + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + some-feat = [] + "#) + .file("bar/src/lib.rs", r#" + #[cfg(feature = "some-feat")] + pub fn test() { print!("test"); } + "#) + .build(); + + // The foo project requires that feature "some-feat" in "bar" is enabled. + // Building without any features enabled should fail: + assert_that(p.cargo("build"), + execs().with_status(101)); + + // We should be able to enable the feature "derived-feat", which enables "some-feat", + // on the command line. 
The feature is enabled, thus building should be successful: + assert_that(p.cargo("build").arg("--features").arg("derived/derived-feat"), + execs().with_status(0)); + + // Trying to enable features of transitive dependencies is an error + assert_that(p.cargo("build").arg("--features").arg("bar/some-feat"), + execs().with_status(101).with_stderr("\ +[ERROR] Package `foo v0.0.1 ([..])` does not have these features: `bar` +")); + + // Hierarchical feature specification should still be disallowed + assert_that(p.cargo("build").arg("--features").arg("derived/bar/some-feat"), + execs().with_status(101).with_stderr("\ +[ERROR] feature names may not contain slashes: `bar/some-feat` +")); +} + +#[test] +fn all_features_flag_enables_all_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = [] + bar = [] + + [dependencies.baz] + path = "baz" + optional = true + "#) + .file("src/main.rs", r#" + #[cfg(feature = "foo")] + pub fn foo() {} + + #[cfg(feature = "bar")] + pub fn bar() { + extern crate baz; + baz::baz(); + } + + fn main() { + foo(); + bar(); + } + "#) + .file("baz/Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + assert_that(p.cargo("build").arg("--all-features"), + execs().with_status(0)); +} + +#[test] +fn many_cli_features_comma_delimited() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#) + .file("src/main.rs", r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("bar,baz"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", dir = p.url()))); +} + +#[test] +fn many_cli_features_comma_and_space_delimited() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + + [dependencies.bam] + path = "bam" + optional = true + + [dependencies.bap] + path = "bap" + optional = true + "#) + .file("src/main.rs", r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + #[allow(unused_extern_crates)] + extern crate bam; + #[allow(unused_extern_crates)] + extern crate bap; + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#) + .file("baz/src/lib.rs", "pub fn baz() {}") + .file("bam/Cargo.toml", r#" + [package] + name = "bam" + version = "0.0.1" + authors = [] + "#) + .file("bam/src/lib.rs", "pub fn bam() {}") + .file("bap/Cargo.toml", r#" + [package] + name = "bap" + version = "0.0.1" + authors = [] + "#) + .file("bap/src/lib.rs", "pub fn bap() {}") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("bar,baz bam bap"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); +} diff --git a/collector/compile-benchmarks/cargo/tests/fetch.rs b/collector/compile-benchmarks/cargo/tests/fetch.rs new file mode 100644 index 000000000..8c5e9a59d --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/fetch.rs @@ -0,0 +1,24 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn no_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/main.rs", r#" + mod a; fn main() {} + "#) + .file("src/a.rs", "") + .build(); + + assert_that(p.cargo("fetch"), + execs().with_status(0).with_stdout("")); +} diff --git a/collector/compile-benchmarks/cargo/tests/freshness.rs b/collector/compile-benchmarks/cargo/tests/freshness.rs new file mode 100644 index 000000000..df4de3864 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/freshness.rs @@ -0,0 +1,735 @@ +extern crate cargotest; +extern crate hamcrest; + +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargotest::sleep_ms; +use cargotest::support::{project, execs, path2url}; +use cargotest::support::paths::CargoPathExt; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn modifying_and_moving() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/main.rs", r#" + mod a; fn main() {} + "#) + .file("src/a.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", dir = path2url(p.root())))); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); + p.root().move_into_the_past(); + p.root().join("target").move_into_the_past(); + + File::create(&p.root().join("src/a.rs")).unwrap() + .write_all(b"#[allow(unused)]fn main() {}").unwrap(); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = path2url(p.root())))); + + fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap(); + assert_that(p.cargo("build"), + execs().with_status(101)); +} + +#[test] +fn modify_only_some_files() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/lib.rs", "mod a;") + .file("src/a.rs", "") + .file("src/main.rs", r#" + mod b; + fn main() {} + "#) + .file("src/b.rs", "") + .file("tests/test.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = path2url(p.root())))); + assert_that(p.cargo("test"), + execs().with_status(0)); + sleep_ms(1000); + + assert_that(&p.bin("foo"), existing_file()); + + let lib = p.root().join("src/lib.rs"); + let bin = p.root().join("src/b.rs"); + + File::create(&lib).unwrap().write_all(b"invalid rust code").unwrap(); + File::create(&bin).unwrap().write_all(b"#[allow(unused)]fn foo() {}").unwrap(); + lib.move_into_the_past(); + + // Make sure the binary is rebuilt, not the lib + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = path2url(p.root())))); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn rebuild_sub_package_then_while_package() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies.a] + path = "a" + [dependencies.b] + path = "b" + "#) + .file("src/lib.rs", "extern crate a; extern crate b;") + .file("a/Cargo.toml", r#" + [package] + name = "a" + authors = [] + version = "0.0.1" + [dependencies.b] + path = "../b" + "#) + .file("a/src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", r#" + [package] + name = "b" + authors = [] + version = "0.0.1" + "#) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + File::create(&p.root().join("b/src/lib.rs")).unwrap().write_all(br#" + pub fn b() {} + "#).unwrap(); + + assert_that(p.cargo("build").arg("-pb"), + execs().with_status(0)); + + File::create(&p.root().join("src/lib.rs")).unwrap().write_all(br#" + extern crate a; + extern crate b; + pub fn toplevel() {} + "#).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn changing_lib_features_caches_targets() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [features] + foo = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); + + assert_that(p.cargo("build").arg("--features").arg("foo"), + execs().with_status(0) + .with_stderr("\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + /* Targets should be cached from the first build */ + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stdout("")); + + assert_that(p.cargo("build").arg("--features").arg("foo"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn changing_profiles_caches_targets() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [profile.dev] + panic = "abort" + + [profile.test] + panic = "unwind" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr("\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE] +[DOCTEST] foo +")); + + /* Targets should be cached from the first build */ + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("test").arg("foo"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE] +[DOCTEST] foo +")); +} + +#[test] +fn changing_bin_paths_common_target_features_caches_targets() { + // Make sure dep_cache crate is built once per feature + let p = project("foo") + .file(".cargo/config", r#" + [build] + target-dir = "./target" + "#) + .file("dep_crate/Cargo.toml", r#" + [package] + name = "dep_crate" + version = "0.0.1" + authors = [] + + [features] + ftest = [] + "#) + .file("dep_crate/src/lib.rs", r#" + #[cfg(feature = "ftest")] + pub fn yo() { + println!("ftest on") + } + #[cfg(not(feature = "ftest"))] + pub fn yo() { + println!("ftest off") + } + "#) + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + dep_crate = {path = "../dep_crate", features = []} + "#) + .file("a/src/lib.rs", "") + .file("a/src/main.rs", r#" + extern crate dep_crate; + use dep_crate::yo; + fn main() { + yo(); + } + "#) + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + dep_crate = {path = "../dep_crate", features = ["ftest"]} + "#) + .file("b/src/lib.rs", "") + .file("b/src/main.rs", r#" + extern crate dep_crate; + use dep_crate::yo; + fn main() { + yo(); + } + "#) + .build(); + + /* Build and rebuild a/. Ensure dep_crate only builds once */ + assert_that(p.cargo("run").cwd(p.root().join("a")), + execs().with_status(0) + .with_stdout("ftest off") + .with_stderr("\ +[..]Compiling dep_crate v0.0.1 ([..]) +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `[..]target[/]debug[/]a[EXE]` +")); + assert_that(p.cargo("clean").arg("-p").arg("a").cwd(p.root().join("a")), + execs().with_status(0)); + assert_that(p.cargo("run").cwd(p.root().join("a")), + execs().with_status(0) + .with_stdout("ftest off") + .with_stderr("\ +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]a[EXE]` +")); + + /* Build and rebuild b/. Ensure dep_crate only builds once */ + assert_that(p.cargo("run").cwd(p.root().join("b")), + execs().with_status(0) + .with_stdout("ftest on") + .with_stderr("\ +[..]Compiling dep_crate v0.0.1 ([..]) +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]b[EXE]` +")); + assert_that(p.cargo("clean").arg("-p").arg("b").cwd(p.root().join("b")), + execs().with_status(0)); + assert_that(p.cargo("run").cwd(p.root().join("b")), + execs().with_status(0) + .with_stdout("ftest on") + .with_stderr("\ +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]b[EXE]` +")); + + /* Build a/ package again. If we cache different feature dep builds correctly, + * this should not cause a rebuild of dep_crate */ + assert_that(p.cargo("clean").arg("-p").arg("a").cwd(p.root().join("a")), + execs().with_status(0)); + assert_that(p.cargo("run").cwd(p.root().join("a")), + execs().with_status(0) + .with_stdout("ftest off") + .with_stderr("\ +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]a[EXE]` +")); + + /* Build b/ package again. If we cache different feature dep builds correctly, + * this should not cause a rebuild */ + assert_that(p.cargo("clean").arg("-p").arg("b").cwd(p.root().join("b")), + execs().with_status(0)); + assert_that(p.cargo("run").cwd(p.root().join("b")), + execs().with_status(0) + .with_stdout("ftest on") + .with_stderr("\ +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]b[EXE]` +")); +} + +#[test] +fn changing_bin_features_caches_targets() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [features] + foo = [] + "#) + .file("src/main.rs", r#" + fn main() { + let msg = if cfg!(feature = "foo") { "feature on" } else { "feature off" }; + println!("{}", msg); + } + "#) + .build(); + + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stdout("feature off") + .with_stderr("\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]` +")); + + assert_that(p.cargo("run").arg("--features").arg("foo"), + execs().with_status(0) + .with_stdout("feature on") + .with_stderr("\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]` +")); + + /* Targets should be cached from the first build */ + + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stdout("feature off") + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]` +")); + + assert_that(p.cargo("run").arg("--features").arg("foo"), + execs().with_status(0) + .with_stdout("feature on") + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]foo[EXE]` +")); +} + +#[test] +fn rebuild_tests_if_lib_changes() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .file("tests/foo.rs", r#" + extern crate foo; + #[test] + fn test() { foo::foo(); } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(p.cargo("test"), + execs().with_status(0)); + + sleep_ms(1000); + File::create(&p.root().join("src/lib.rs")).unwrap(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + assert_that(p.cargo("test").arg("-v"), + execs().with_status(101)); +} + +#[test] +fn no_rebuild_transitive_target_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + [dev-dependencies] + b = { path = "b" } + "#) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + c = { path = "../c" } + "#) + .file("a/src/lib.rs", "") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + c = { path = "../c" } + "#) + .file("b/src/lib.rs", "") + .file("c/Cargo.toml", r#" + [package] + name = "c" + version = "0.0.1" + authors = [] + "#) + .file("c/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(p.cargo("test").arg("--no-run"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] c v0.0.1 ([..]) +[COMPILING] b v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn rerun_if_changed_in_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("a/build.rs", r#" + fn main() { + println!("cargo:rerun-if-changed=build.rs"); + } + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn same_build_dir_cached_packages() { + let p = project("foo") + .file("a1/Cargo.toml", r#" + [package] + name = "a1" + version = "0.0.1" + authors = [] + [dependencies] + b = { path = "../b" } + "#) + .file("a1/src/lib.rs", "") + .file("a2/Cargo.toml", r#" + [package] + name = "a2" + version = "0.0.1" + authors = [] + [dependencies] + b = { path = "../b" } + "#) + .file("a2/src/lib.rs", "") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + [dependencies] + c = { path = "../c" } + "#) + .file("b/src/lib.rs", "") + .file("c/Cargo.toml", r#" + [package] + name = "c" + version = "0.0.1" + authors = [] + [dependencies] + d = { path = "../d" } + "#) + .file("c/src/lib.rs", "") + .file("d/Cargo.toml", r#" + [package] + name = "d" + version = "0.0.1" + authors = [] + "#) + .file("d/src/lib.rs", "") + .file(".cargo/config", r#" + [build] + target-dir = "./target" + "#) + .build(); + + assert_that(p.cargo("build").cwd(p.root().join("a1")), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] d v0.0.1 ({dir}/d) +[COMPILING] c v0.0.1 ({dir}/c) +[COMPILING] b v0.0.1 ({dir}/b) +[COMPILING] a1 v0.0.1 ({dir}/a1) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); + assert_that(p.cargo("build").cwd(p.root().join("a2")), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] a2 v0.0.1 ({dir}/a2) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); +} + +#[test] +fn no_rebuild_if_build_artifacts_move_backwards_in_time() { + let p = project("backwards_in_time") + .file("Cargo.toml", r#" + [package] + name = "backwards_in_time" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + p.root().move_into_the_past(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("").with_stderr("\ +[FINISHED] [..] +")); +} + +#[test] +fn rebuild_if_build_artifacts_move_forward_in_time() { + let p = project("forwards_in_time") + .file("Cargo.toml", r#" + [package] + name = "forwards_in_time" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + p.root().move_into_the_future(); + + assert_that(p.cargo("build").env("RUST_LOG", ""), + execs().with_status(0).with_stdout("").with_stderr("\ +[COMPILING] a v0.0.1 ([..]) +[COMPILING] forwards_in_time v0.0.1 ([..]) +[FINISHED] [..] 
+")); +} + +#[test] +fn rebuild_if_environment_changes() { + let p = project("env_change") + .file("Cargo.toml", r#" + [package] + name = "env_change" + description = "old desc" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { + println!("{}", env!("CARGO_PKG_DESCRIPTION")); + } + "#) + .build(); + + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stdout("old desc").with_stderr(&format!("\ +[COMPILING] env_change v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]env_change[EXE]` +", dir = p.url()))); + + File::create(&p.root().join("Cargo.toml")).unwrap().write_all(br#" + [package] + name = "env_change" + description = "new desc" + version = "0.0.1" + authors = [] + "#).unwrap(); + + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stdout("new desc").with_stderr(&format!("\ +[COMPILING] env_change v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]env_change[EXE]` +", dir = p.url()))); +} diff --git a/collector/compile-benchmarks/cargo/tests/generate-lockfile.rs b/collector/compile-benchmarks/cargo/tests/generate-lockfile.rs new file mode 100644 index 000000000..56f853018 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/generate-lockfile.rs @@ -0,0 +1,190 @@ +extern crate cargotest; +extern crate hamcrest; + +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargotest::support::{project, execs}; +use hamcrest::{assert_that, existing_file, is_not}; + +#[test] +fn adding_and_removing_packages() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + authors = [] + version = "0.0.1" + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + + let toml = p.root().join("Cargo.toml"); + let lock1 = p.read_lockfile(); + + // add a dep + File::create(&toml).unwrap().write_all(br#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies.bar] + path = "bar" + "#).unwrap(); + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + let lock2 = p.read_lockfile(); + assert!(lock1 != lock2); + + // change the dep + File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#" + [package] + name = "bar" + authors = [] + version = "0.0.2" + "#).unwrap(); + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + let lock3 = p.read_lockfile(); + assert!(lock1 != lock3); + assert!(lock2 != lock3); + + // remove the dep + println!("lock4"); + File::create(&toml).unwrap().write_all(br#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#).unwrap(); + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + let lock4 = p.read_lockfile(); + assert_eq!(lock1, lock4); +} + +#[test] +fn preserve_metadata() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + authors = [] + version = "0.0.1" + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + + let metadata = r#" +[metadata] +bar = "baz" +foo = "bar" +"#; + let lockfile = p.root().join("Cargo.lock"); + let lock = p.read_lockfile(); + let 
data = lock + metadata; + File::create(&lockfile).unwrap().write_all(data.as_bytes()).unwrap(); + + // Build and make sure the metadata is still there + assert_that(p.cargo("build"), + execs().with_status(0)); + let lock = p.read_lockfile(); + assert!(lock.contains(metadata.trim()), "{}", lock); + + // Update and make sure the metadata is still there + assert_that(p.cargo("update"), + execs().with_status(0)); + let lock = p.read_lockfile(); + assert!(lock.contains(metadata.trim()), "{}", lock); +} + +#[test] +fn preserve_line_endings_issue_2076() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + authors = [] + version = "0.0.1" + "#) + .file("bar/src/lib.rs", "") + .build(); + + let lockfile = p.root().join("Cargo.lock"); + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + assert_that(&lockfile, + existing_file()); + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + + let lock0 = p.read_lockfile(); + + assert!(lock0.starts_with("[[package]]\n")); + + let lock1 = lock0.replace("\n", "\r\n"); + { + File::create(&lockfile).unwrap().write_all(lock1.as_bytes()).unwrap(); + } + + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + + let lock2 = p.read_lockfile(); + + assert!(lock2.starts_with("[[package]]\r\n")); + assert_eq!(lock1, lock2); +} + +#[test] +fn cargo_update_generate_lockfile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + let lockfile = p.root().join("Cargo.lock"); + assert_that(&lockfile, is_not(existing_file())); + assert_that(p.cargo("update"), execs().with_status(0).with_stdout("")); + assert_that(&lockfile, existing_file()); + + fs::remove_file(p.root().join("Cargo.lock")).unwrap(); + + assert_that(&lockfile, is_not(existing_file())); + assert_that(p.cargo("update"), execs().with_status(0).with_stdout("")); + assert_that(&lockfile, existing_file()); + +} diff --git a/collector/compile-benchmarks/cargo/tests/git.rs b/collector/compile-benchmarks/cargo/tests/git.rs new file mode 100644 index 000000000..c76c67f16 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/git.rs @@ -0,0 +1,2176 @@ +extern crate cargo; +extern crate cargotest; +extern crate git2; +extern crate hamcrest; + +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::Path; + +use cargo::util::process; +use cargotest::sleep_ms; +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::{git, project, execs, main_file, path2url}; +use hamcrest::{assert_that,existing_file}; + +#[test] +fn cargo_compile_simple_git_dep() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#) + .file("src/dep1.rs", r#" + pub fn hello() -> &'static str { + "hello world" + } + "#) + }).unwrap(); + + let project = project + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, git_project.url())) + .file("src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])) + .build(); + + let root = project.root(); + let git_root = git_project.root(); + + 
assert_that(project.cargo("build"), + execs() + .with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(git_root.clone()), + path2url(git_root), + path2url(root)))); + + assert_that(&project.bin("foo"), existing_file()); + + assert_that( + process(&project.bin("foo")), + execs().with_stdout("hello world\n")); +} + +#[test] +fn cargo_compile_git_dep_branch() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#) + .file("src/dep1.rs", r#" + pub fn hello() -> &'static str { + "hello world" + } + "#) + }).unwrap(); + + // Make a new branch based on the current HEAD commit + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + let head = repo.find_commit(head).unwrap(); + repo.branch("branchy", &head, true).unwrap(); + + let project = project + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + branch = "branchy" + + "#, git_project.url())) + .file("src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])) + .build(); + + let root = project.root(); + let git_root = git_project.root(); + + assert_that(project.cargo("build"), + execs() + .with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(git_root.clone()), + path2url(git_root), + path2url(root)))); + + assert_that(&project.bin("foo"), existing_file()); + + assert_that( + process(&project.bin("foo")), + execs().with_stdout("hello world\n")); +} + +#[test] +fn cargo_compile_git_dep_tag() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#) + .file("src/dep1.rs", r#" + pub fn hello() -> &'static str { + "hello world" + } + "#) + }).unwrap(); + + // Make a tag corresponding to the current HEAD + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + repo.tag("v0.1.0", + &repo.find_object(head, None).unwrap(), + &repo.signature().unwrap(), + "make a new tag", + false).unwrap(); + + let project = project + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + tag = "v0.1.0" + "#, git_project.url())) + .file("src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])) + .build(); + + let root = project.root(); + let git_root = git_project.root(); + + assert_that(project.cargo("build"), + execs() + .with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(git_root.clone()), + path2url(git_root), + path2url(root)))); + + assert_that(&project.bin("foo"), existing_file()); + + assert_that(process(&project.bin("foo")), + execs().with_stdout("hello world\n")); + + 
assert_that(project.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn cargo_compile_with_nested_paths() { + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [dependencies.dep2] + + version = "0.5.0" + path = "vendor/dep2" + + [lib] + + name = "dep1" + "#) + .file("src/dep1.rs", r#" + extern crate dep2; + + pub fn hello() -> &'static str { + dep2::hello() + } + "#) + .file("vendor/dep2/Cargo.toml", r#" + [project] + + name = "dep2" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep2" + "#) + .file("vendor/dep2/src/dep2.rs", r#" + pub fn hello() -> &'static str { + "hello world" + } + "#) + }).unwrap(); + + let p = project("parent") + .file("Cargo.toml", &format!(r#" + [project] + + name = "parent" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "parent" + "#, git_project.url())) + .file("src/parent.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"])) + .build(); + + p.cargo("build") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("parent"), existing_file()); + + assert_that(process(&p.bin("parent")), + execs().with_stdout("hello world\n")); +} + +#[test] +fn cargo_compile_with_malformed_nested_paths() { + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#) + .file("src/dep1.rs", r#" + pub fn hello() -> &'static str { + "hello world" + } + "#) + .file("vendor/dep2/Cargo.toml", r#" + !INVALID! + "#) + }).unwrap(); + + let p = project("parent") + .file("Cargo.toml", &format!(r#" + [project] + + name = "parent" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "parent" + "#, git_project.url())) + .file("src/parent.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"])) + .build(); + + p.cargo("build") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("parent"), existing_file()); + + assert_that(process(&p.bin("parent")), + execs().with_stdout("hello world\n")); +} + +#[test] +fn cargo_compile_with_meta_package() { + let git_project = git::new("meta-dep", |project| { + project + .file("dep1/Cargo.toml", r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#) + .file("dep1/src/dep1.rs", r#" + pub fn hello() -> &'static str { + "this is dep1" + } + "#) + .file("dep2/Cargo.toml", r#" + [project] + + name = "dep2" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep2" + "#) + .file("dep2/src/dep2.rs", r#" + pub fn hello() -> &'static str { + "this is dep2" + } + "#) + }).unwrap(); + + let p = project("parent") + .file("Cargo.toml", &format!(r#" + [project] + + name = "parent" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [dependencies.dep2] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "parent" + "#, git_project.url(), git_project.url())) + .file("src/parent.rs", + &main_file(r#""{} {}", dep1::hello(), dep2::hello()"#, &["dep1", "dep2"])) + .build(); + + p.cargo("build") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("parent"), existing_file()); + + assert_that(process(&p.bin("parent")), + 
execs().with_stdout("this is dep1 this is dep2\n")); +} + +#[test] +fn cargo_compile_with_short_ssh_git() { + let url = "git@github.com:a/dep"; + + let project = project("project") + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep] + + git = "{}" + + [[bin]] + + name = "foo" + "#, url)) + .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])) + .build(); + + assert_that(project.cargo("build"), + execs() + .with_stdout("") + .with_stderr(&format!("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + invalid url `{}`: relative URL without a base +", url))); +} + +#[test] +fn two_revs_same_deps() { + let bar = git::new("meta-dep", |project| { + project.file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }).unwrap(); + + let repo = git2::Repository::open(&bar.root()).unwrap(); + let rev1 = repo.revparse_single("HEAD").unwrap().id(); + + // Commit the changes and make sure we trigger a recompile + File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" + pub fn bar() -> i32 { 2 } + "#).unwrap(); + git::add(&repo); + let rev2 = git::commit(&repo); + + let foo = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + rev = "{}" + + [dependencies.baz] + path = "../baz" + "#, bar.url(), rev1)) + .file("src/main.rs", r#" + extern crate bar; + extern crate baz; + + fn main() { + assert_eq!(bar::bar(), 1); + assert_eq!(baz::baz(), 2); + } + "#) + .build(); + + let _baz = project("baz") + .file("Cargo.toml", &format!(r#" + [package] + name = "baz" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + rev = "{}" + "#, bar.url(), rev2)) + .file("src/lib.rs", r#" + extern crate bar; + pub fn baz() -> i32 { bar::bar() } + "#) + .build(); + + assert_that(foo.cargo("build").arg("-v"), + execs().with_status(0)); + assert_that(&foo.bin("foo"), existing_file()); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); +} + +#[test] +fn recompilation() { + let git_project = git::new("bar", |project| { + project + .file("Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + name = "bar" + "#) + .file("src/bar.rs", r#" + pub fn bar() {} + "#) + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + git = '{}' + "#, git_project.url())) + .file("src/main.rs", + &main_file(r#""{:?}", bar::bar()"#, &["bar"])) + .build(); + + // First time around we should compile both foo and bar + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + git_project.url(), + git_project.url(), + p.url()))); + + // Don't recompile the second time + assert_that(p.cargo("build"), + execs().with_stdout("")); + + // Modify a file manually, shouldn't trigger a recompile + File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#" + pub fn bar() { println!("hello!"); } + "#).unwrap(); + + assert_that(p.cargo("build"), + execs().with_stdout("")); + + assert_that(p.cargo("update"), + 
execs().with_stderr(&format!("[UPDATING] git repository `{}`", + git_project.url()))); + + assert_that(p.cargo("build"), + execs().with_stdout("")); + + // Commit the changes and make sure we don't trigger a recompile because the + // lockfile says not to change + let repo = git2::Repository::open(&git_project.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + println!("compile after commit"); + assert_that(p.cargo("build"), + execs().with_stdout("")); + p.root().move_into_the_past(); + + // Update the dependency and carry on! + assert_that(p.cargo("update"), + execs().with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ + ", + git_project.url()))); + println!("going for the last compile"); + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + git_project.url(), + p.url()))); + + // Make sure clean only cleans one dep + assert_that(p.cargo("clean") + .arg("-p").arg("foo"), + execs().with_stdout("")); + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url()))); +} + +#[test] +fn update_with_shared_deps() { + let git_project = git::new("bar", |project| { + project + .file("Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + name = "bar" + "#) + .file("src/bar.rs", r#" + pub fn bar() {} + "#) + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + path = "dep1" + [dependencies.dep2] + path = "dep2" + "#) + .file("src/main.rs", r#" + #[allow(unused_extern_crates)] + extern crate dep1; + #[allow(unused_extern_crates)] + extern crate dep2; + fn main() {} + "#) + .file("dep1/Cargo.toml", &format!(r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + version = "0.5.0" + git = '{}' + "#, git_project.url())) + .file("dep1/src/lib.rs", "") + .file("dep2/Cargo.toml", &format!(r#" + [package] + name = "dep2" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + version = "0.5.0" + git = '{}' + "#, git_project.url())) + .file("dep2/src/lib.rs", "") + .build(); + + // First time around we should compile both foo and bar + assert_that(p.cargo("build"), + execs().with_stderr(&format!("\ +[UPDATING] git repository `{git}` +[COMPILING] bar v0.5.0 ({git}#[..]) +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] 
v0.5.0 ([..]) +[COMPILING] foo v0.5.0 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", +git = git_project.url(), dir = p.url()))); + + // Modify a file manually, and commit it + File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#" + pub fn bar() { println!("hello!"); } + "#).unwrap(); + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let old_head = repo.head().unwrap().target().unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + + // By default, not transitive updates + println!("dep1 update"); + assert_that(p.cargo("update") + .arg("-p").arg("dep1"), + execs().with_stdout("")); + + // Don't do anything bad on a weird --precise argument + println!("bar bad precise update"); + assert_that(p.cargo("update") + .arg("-p").arg("bar") + .arg("--precise").arg("0.1.2"), + execs().with_status(101).with_stderr("\ +[UPDATING] git repository [..] +[ERROR] Unable to update [..] + +To learn more, run the command again with --verbose. +")); + + // Specifying a precise rev to the old rev shouldn't actually update + // anything because we already have the rev in the db. + println!("bar precise update"); + assert_that(p.cargo("update") + .arg("-p").arg("bar") + .arg("--precise").arg(&old_head.to_string()), + execs().with_stdout("")); + + // Updating aggressively should, however, update the repo. + println!("dep1 aggressive update"); + assert_that(p.cargo("update") + .arg("-p").arg("dep1") + .arg("--aggressive"), + execs().with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ + ", git_project.url()))); + + // Make sure we still only compile one version of the git repo + println!("build"); + assert_that(p.cargo("build"), + execs().with_stderr(&format!("\ +[COMPILING] bar v0.5.0 ({git}#[..]) +[COMPILING] [..] v0.5.0 ({dir}[..]dep[..]) +[COMPILING] [..] v0.5.0 ({dir}[..]dep[..]) +[COMPILING] foo v0.5.0 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + git = git_project.url(), dir = p.url()))); + + // We should be able to update transitive deps + assert_that(p.cargo("update").arg("-p").arg("bar"), + execs().with_stderr(&format!("[UPDATING] git repository `{}`", + git_project.url()))); +} + +#[test] +fn dep_with_submodule() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#) + }).unwrap(); + let git_project2 = git::new("dep2", |project| { + project.file("lib.rs", "pub fn dep() {}") + }).unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let url = path2url(git_project2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("src")); + git::commit(&repo); + + let project = project + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, git_project.url())) + .file("src/lib.rs", " + extern crate dep1; + pub fn foo() { dep1::dep() } + ") + .build(); + + assert_that(project.cargo("build"), + execs().with_stderr("\ +[UPDATING] git repository [..] +[COMPILING] dep1 [..] +[COMPILING] foo [..] 
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n").with_status(0)); +} + +#[test] +fn dep_with_bad_submodule() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#) + }).unwrap(); + let git_project2 = git::new("dep2", |project| { + project.file("lib.rs", "pub fn dep() {}") + }).unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let url = path2url(git_project2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("src")); + git::commit(&repo); + + // now amend the first commit on git_project2 to make submodule ref point to not-found + // commit + let repo = git2::Repository::open(&git_project2.root()).unwrap(); + let original_submodule_ref = repo.refname_to_id("refs/heads/master").unwrap(); + let commit = repo.find_commit(original_submodule_ref).unwrap(); + commit.amend( + Some("refs/heads/master"), + None, + None, + None, + Some("something something"), + None).unwrap(); + + let p = project + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, git_project.url())) + .file("src/lib.rs", " + extern crate dep1; + pub fn foo() { dep1::dep() } + ") + .build(); + + let expected = format!("\ +[UPDATING] git repository [..] +[ERROR] failed to load source for a dependency on `dep1` + +Caused by: + Unable to update {} + +Caused by: + failed to update submodule `src` + +To learn more, run the command again with --verbose.\n", path2url(git_project.root())); + + assert_that(p.cargo("build"), + execs().with_stderr(expected).with_status(101)); +} + +#[test] +fn two_deps_only_update_one() { + let project = project("foo"); + let git1 = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#) + .file("src/lib.rs", "") + }).unwrap(); + let git2 = git::new("dep2", |project| { + project + .file("Cargo.toml", r#" + [package] + name = "dep2" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + git = '{}' + [dependencies.dep2] + git = '{}' + "#, git1.url(), git2.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs() + .with_stderr(&format!("[UPDATING] git repository `[..]`\n\ + [UPDATING] git repository `[..]`\n\ + [COMPILING] [..] v0.5.0 ([..])\n\ + [COMPILING] [..] 
v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + p.url()))); + + File::create(&git1.root().join("src/lib.rs")).unwrap().write_all(br#" + pub fn foo() {} + "#).unwrap(); + let repo = git2::Repository::open(&git1.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + assert_that(p.cargo("update") + .arg("-p").arg("dep1"), + execs() + .with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ + ", git1.url()))); +} + +#[test] +fn stale_cached_version() { + let bar = git::new("meta-dep", |project| { + project.file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }).unwrap(); + + // Update the git database in the cache with the current state of the git + // repo + let foo = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + "#, bar.url())) + .file("src/main.rs", r#" + extern crate bar; + + fn main() { assert_eq!(bar::bar(), 1) } + "#) + .build(); + + assert_that(foo.cargo("build"), execs().with_status(0)); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); + + // Update the repo, and simulate someone else updating the lockfile and then + // us pulling it down. + File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" + pub fn bar() -> i32 { 1 + 0 } + "#).unwrap(); + let repo = git2::Repository::open(&bar.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + + let rev = repo.revparse_single("HEAD").unwrap().id(); + + File::create(&foo.root().join("Cargo.lock")).unwrap().write_all(format!(r#" + [[package]] + name = "foo" + version = "0.0.0" + dependencies = [ + 'bar 0.0.0 (git+{url}#{hash})' + ] + + [[package]] + name = "bar" + version = "0.0.0" + source = 'git+{url}#{hash}' + "#, url = bar.url(), hash = rev).as_bytes()).unwrap(); + + // Now build! + assert_that(foo.cargo("build"), + execs().with_status(0) + .with_stderr(&format!("\ +[UPDATING] git repository `{bar}` +[COMPILING] bar v0.0.0 ({bar}#[..]) +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", bar = bar.url(), foo = foo.url()))); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); +} + +#[test] +fn dep_with_changed_submodule() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#) + }).unwrap(); + + let git_project2 = git::new("dep2", |project| { + project + .file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }") + }).unwrap(); + + let git_project3 = git::new("dep3", |project| { + project + .file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }") + }).unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), + Path::new("src")); + git::commit(&repo); + + let p = project + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + [dependencies.dep1] + git = '{}' + "#, git_project.url())) + .file("src/main.rs", " + extern crate dep1; + pub fn main() { println!(\"{}\", dep1::dep()) } + ") + .build(); + + println!("first run"); + assert_that(p.cargo("run"), execs() + .with_stderr("[UPDATING] git repository `[..]`\n\ + [COMPILING] dep1 v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n\ + [RUNNING] `target[/]debug[/]foo[EXE]`\n") + .with_stdout("project2\n") + .with_status(0)); + + File::create(&git_project.root().join(".gitmodules")).unwrap() + .write_all(format!("[submodule \"src\"]\n\tpath = src\n\turl={}", + git_project3.url()).as_bytes()).unwrap(); + + // Sync the submodule and reset it to the new remote. + sub.sync().unwrap(); + { + let subrepo = sub.open().unwrap(); + subrepo.remote_add_fetch("origin", + "refs/heads/*:refs/heads/*").unwrap(); + subrepo.remote_set_url("origin", + &git_project3.url().to_string()).unwrap(); + let mut origin = subrepo.find_remote("origin").unwrap(); + origin.fetch(&[], None, None).unwrap(); + let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap(); + let obj = subrepo.find_object(id, None).unwrap(); + subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap(); + } + sub.add_to_index(true).unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + // Update the dependency and carry on! 
+ println!("update"); + assert_that(p.cargo("update").arg("-v"), + execs() + .with_stderr("") + .with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ + ", git_project.url()))); + + println!("last run"); + assert_that(p.cargo("run"), execs() + .with_stderr("[COMPILING] dep1 v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n\ + [RUNNING] `target[/]debug[/]foo[EXE]`\n") + .with_stdout("project3\n") + .with_status(0)); +} + +#[test] +fn dev_deps_with_testing() { + let p2 = git::new("bar", |project| { + project.file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", r#" + pub fn gimme() -> &'static str { "zoidberg" } + "#) + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + version = "0.5.0" + git = '{}' + "#, p2.url())) + .file("src/main.rs", r#" + fn main() {} + + #[cfg(test)] + mod tests { + extern crate bar; + #[test] fn foo() { bar::gimme(); } + } + "#) + .build(); + + // Generate a lockfile which did not use `bar` to compile, but had to update + // `bar` to generate the lockfile + assert_that(p.cargo("build"), + execs().with_stderr(&format!("\ +[UPDATING] git repository `{bar}` +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", url = p.url(), bar = p2.url()))); + + // Make sure we use the previous resolution of `bar` instead of updating it + // a second time. + assert_that(p.cargo("test"), + execs().with_stderr("\ +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] v0.5.0 ([..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test tests::foo ... ok")); +} + +#[test] +fn git_build_cmd_freshness() { + let foo = git::new("foo", |project| { + project.file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .file(".gitignore", " + src/bar.rs + ") + }).unwrap(); + foo.root().move_into_the_past(); + + sleep_ms(1000); + + assert_that(foo.cargo("build"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", url = foo.url()))); + + // Smoke test to make sure it doesn't compile again + println!("first pass"); + assert_that(foo.cargo("build"), + execs().with_status(0) + .with_stdout("")); + + // Modify an ignored file and make sure we don't rebuild + println!("second pass"); + File::create(&foo.root().join("src/bar.rs")).unwrap(); + assert_that(foo.cargo("build"), + execs().with_status(0) + .with_stdout("")); +} + +#[test] +fn git_name_not_always_needed() { + let p2 = git::new("bar", |project| { + project.file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", r#" + pub fn gimme() -> &'static str { "zoidberg" } + "#) + }).unwrap(); + + let repo = git2::Repository::open(&p2.root()).unwrap(); + let mut cfg = repo.config().unwrap(); + let _ = cfg.remove("user.name"); + let _ = cfg.remove("user.email"); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.bar] + git = '{}' + "#, p2.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + // Generate a lockfile which did not use `bar` to compile, but had to update + // `bar` to generate the lockfile + assert_that(p.cargo("build"), + execs().with_stderr(&format!("\ +[UPDATING] git repository `{bar}` +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", url = p.url(), bar = p2.url()))); +} + +#[test] +fn git_repo_changing_no_rebuild() { + let bar = git::new("bar", |project| { + project.file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }).unwrap(); + + // Lock p1 to the first rev in the git repo + let p1 = project("p1") + .file("Cargo.toml", &format!(r#" + [project] + name = "p1" + version = "0.5.0" + authors = [] + build = 'build.rs' + [dependencies.bar] + git = '{}' + "#, bar.url())) + .file("src/main.rs", "fn main() {}") + .file("build.rs", "fn main() {}") + .build(); + p1.root().move_into_the_past(); + assert_that(p1.cargo("build"), + execs().with_stderr(&format!("\ +[UPDATING] git repository `{bar}` +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", bar = bar.url()))); + + // Make a commit to lock p2 to a different rev + File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" + pub fn bar() -> i32 { 2 } + "#).unwrap(); + let repo = git2::Repository::open(&bar.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + // Lock p2 to the second rev + let p2 = project("p2") + .file("Cargo.toml", &format!(r#" + [project] + name = "p2" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, bar.url())) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that(p2.cargo("build"), + execs().with_stderr(&format!("\ +[UPDATING] git repository `{bar}` +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", bar = bar.url()))); + + // And now for the real test! Make sure that p1 doesn't get rebuilt + // even though the git repo has changed. 
+ assert_that(p1.cargo("build"), + execs().with_stdout("")); +} + +#[test] +fn git_dep_build_cmd() { + let p = git::new("foo", |project| { + project.file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + + [[bin]] + + name = "foo" + "#) + .file("src/foo.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [lib] + name = "bar" + path = "src/bar.rs" + "#) + .file("bar/src/bar.rs.in", r#" + pub fn gimme() -> i32 { 0 } + "#) + .file("bar/build.rs", r#" + use std::fs; + fn main() { + fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); + } + "#) + }).unwrap(); + + p.root().join("bar").move_into_the_past(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + assert_that(process(&p.bin("foo")), + execs().with_stdout("0\n")); + + // Touching bar.rs.in should cause the `build` command to run again. + fs::File::create(&p.root().join("bar/src/bar.rs.in")).unwrap() + .write_all(b"pub fn gimme() -> i32 { 1 }").unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + assert_that(process(&p.bin("foo")), + execs().with_stdout("1\n")); +} + +#[test] +fn fetch_downloads() { + let bar = git::new("bar", |project| { + project.file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }).unwrap(); + + let p = project("p1") + .file("Cargo.toml", &format!(r#" + [project] + name = "p1" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, bar.url())) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that(p.cargo("fetch"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] git repository `{url}` +", url = bar.url()))); + + assert_that(p.cargo("fetch"), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn warnings_in_git_dep() { + let bar = git::new("bar", |project| { + project.file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "fn unused() {}") + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, bar.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs() + .with_stderr(&format!("[UPDATING] git repository `{}`\n\ + [COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + bar.url(), + bar.url(), + p.url()))); +} + +#[test] +fn update_ambiguous() { + let foo1 = git::new("foo1", |project| { + project.file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "") + }).unwrap(); + let foo2 = git::new("foo2", |project| { + project.file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.6.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "") + }).unwrap(); + let bar = git::new("bar", |project| { + project.file("Cargo.toml", &format!(r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.foo] + git = '{}' + "#, foo2.url())) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("project") + .file("Cargo.toml", 
&format!(r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.foo] + git = '{}' + [dependencies.bar] + git = '{}' + "#, foo1.url(), bar.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + assert_that(p.cargo("update") + .arg("-p").arg("foo"), + execs().with_status(101) + .with_stderr("\ +[ERROR] There are multiple `foo` packages in your project, and the specification `foo` \ +is ambiguous. +Please re-run this command with `-p ` where `` is one of the \ +following: + foo:0.[..].0 + foo:0.[..].0 +")); +} + +#[test] +fn update_one_dep_in_repo_with_many_deps() { + let foo = git::new("foo", |project| { + project.file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("a/src/lib.rs", "") + }).unwrap(); + + let p = project("project") + .file("Cargo.toml", &format!(r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.foo] + git = '{}' + [dependencies.a] + git = '{}' + "#, foo.url(), foo.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + assert_that(p.cargo("update") + .arg("-p").arg("foo"), + execs().with_status(0) + .with_stderr(&format!("\ +[UPDATING] git repository `{}` +", foo.url()))); +} + +#[test] +fn switch_deps_does_not_update_transitive() { + let transitive = git::new("transitive", |project| { + project.file("Cargo.toml", r#" + [package] + name = "transitive" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "") + }).unwrap(); + let dep1 = git::new("dep1", |project| { + project.file("Cargo.toml", &format!(r#" + [package] + name = "dep" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.transitive] + git = '{}' + "#, transitive.url())) + .file("src/lib.rs", "") + }).unwrap(); + let dep2 = git::new("dep2", |project| { + project.file("Cargo.toml", &format!(r#" + [package] + name = "dep" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.transitive] + git = '{}' + "#, transitive.url())) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("project") + .file("Cargo.toml", &format!(r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.dep] + git = '{}' + "#, dep1.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr(&format!("\ +[UPDATING] git repository `{}` +[UPDATING] git repository `{}` +[COMPILING] transitive [..] +[COMPILING] dep [..] +[COMPILING] project [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dep1.url(), transitive.url()))); + + // Update the dependency to point to the second repository, but this + // shouldn't update the transitive dependency which is the same. + File::create(&p.root().join("Cargo.toml")).unwrap().write_all(format!(r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.dep] + git = '{}' + "#, dep2.url()).as_bytes()).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr(&format!("\ +[UPDATING] git repository `{}` +[COMPILING] dep [..] +[COMPILING] project [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", dep2.url()))); +} + +#[test] +fn update_one_source_updates_all_packages_in_that_git_source() { + let dep = git::new("dep", |project| { + project.file("Cargo.toml", r#" + [package] + name = "dep" + version = "0.5.0" + authors = [] + + [dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + }).unwrap(); + + let p = project("project") + .file("Cargo.toml", &format!(r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.dep] + git = '{}' + "#, dep.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let repo = git2::Repository::open(&dep.root()).unwrap(); + let rev1 = repo.revparse_single("HEAD").unwrap().id(); + + // Just be sure to change a file + File::create(&dep.root().join("src/lib.rs")).unwrap().write_all(br#" + pub fn bar() -> i32 { 2 } + "#).unwrap(); + git::add(&repo); + git::commit(&repo); + + assert_that(p.cargo("update").arg("-p").arg("dep"), + execs().with_status(0)); + let mut lockfile = String::new(); + File::open(&p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lockfile).unwrap(); + assert!(!lockfile.contains(&rev1.to_string()), + "{} in {}", rev1, lockfile); +} + +#[test] +fn switch_sources() { + let a1 = git::new("a1", |project| { + project.file("Cargo.toml", r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + let a2 = git::new("a2", |project| { + project.file("Cargo.toml", r#" + [package] + name = "a" + version = "0.5.1" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("project") + .file("Cargo.toml", r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.b] + path = "b" + "#) + .file("src/main.rs", "fn main() {}") + .file("b/Cargo.toml", &format!(r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies.a] + git = '{}' + "#, a1.url())) + .file("b/src/lib.rs", "pub fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] git repository `file://[..]a1` +[COMPILING] a v0.5.0 ([..]a1#[..] +[COMPILING] b v0.5.0 ([..]) +[COMPILING] project v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + File::create(&p.root().join("b/Cargo.toml")).unwrap().write_all(format!(r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies.a] + git = '{}' + "#, a2.url()).as_bytes()).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] git repository `file://[..]a2` +[COMPILING] a v0.5.1 ([..]a2#[..] +[COMPILING] b v0.5.0 ([..]) +[COMPILING] project v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn dont_require_submodules_are_checked_out() { + let p = project("foo").build(); + let git1 = git::new("dep1", |p| { + p.file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .file("a/foo", "") + }).unwrap(); + let git2 = git::new("dep2", |p| p).unwrap(); + + let repo = git2::Repository::open(&git1.root()).unwrap(); + let url = path2url(git2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("a/submodule")); + git::commit(&repo); + + git2::Repository::init(&p.root()).unwrap(); + let url = path2url(git1.root()).to_string(); + let dst = paths::home().join("foo"); + git2::Repository::clone(&url, &dst).unwrap(); + + assert_that(git1.cargo("build").arg("-v").cwd(&dst), + execs().with_status(0)); +} + +#[test] +fn doctest_same_name() { + let a2 = git::new("a2", |p| { + p.file("Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn a2() {}") + }).unwrap(); + + let a1 = git::new("a1", |p| { + p.file("Cargo.toml", &format!(r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + [dependencies] + a = {{ git = '{}' }} + "#, a2.url())) + .file("src/lib.rs", "extern crate a; pub fn a1() {}") + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, a1.url())) + .file("src/lib.rs", r#" + #[macro_use] + extern crate a; + "#) + .build(); + + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn lints_are_suppressed() { + let a = git::new("a", |p| { + p.file("Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", " + use std::option; + ") + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, a.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] git repository `[..]` +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn denied_lints_are_allowed() { + let a = git::new("a", |p| { + p.file("Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", " + #![deny(warnings)] + use std::option; + ") + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, a.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] git repository `[..]` +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn add_a_git_dep() { + let git = git::new("git", |p| { + p.file("Cargo.toml", r#" + [project] + name = "git" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ path = 'a' }} + git = {{ git = '{}' }} + "#, git.url())) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + File::create(p.root().join("a/Cargo.toml")).unwrap().write_all(format!(r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + git = {{ git = '{}' }} + "#, git.url()).as_bytes()).unwrap(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn two_at_rev_instead_of_tag() { + let git = git::new("git", |p| { + p.file("Cargo.toml", r#" + [project] + name = "git1" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "git2" + version = "0.5.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + }).unwrap(); + + // Make a tag corresponding to the current HEAD + let repo = git2::Repository::open(&git.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + repo.tag("v0.1.0", + &repo.find_object(head, None).unwrap(), + &repo.signature().unwrap(), + "make a new tag", + false).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git1 = {{ git = '{0}', rev = 'v0.1.0' }} + git2 = {{ git = '{0}', rev = 'v0.1.0' }} + "#, git.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +#[ignore] // accesses crates.io +fn include_overrides_gitignore() { + let p = git::new("reduction", |repo| { + repo.file("Cargo.toml", r#" + [package] + name = "reduction" + version = "0.5.0" + authors = ["pnkfelix"] + build = "tango-build.rs" + include = ["src/lib.rs", "src/incl.rs", "src/mod.md", "tango-build.rs", "Cargo.toml"] + + [build-dependencies] + filetime = "0.1" + "#) + .file(".gitignore", r#" + target + Cargo.lock + # Below files represent generated code, thus not managed by `git` + src/incl.rs + src/not_incl.rs + "#) + .file("tango-build.rs", r#" + extern crate filetime; + use filetime::FileTime; + use std::fs::{self, File}; + + fn main() { + // generate files, or bring their timestamps into sync. + let source = "src/mod.md"; + + let metadata = fs::metadata(source).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + let atime = FileTime::from_last_access_time(&metadata); + + // sync time stamps for generated files with time stamp of source file. + + let files = ["src/not_incl.rs", "src/incl.rs"]; + for file in files.iter() { + File::create(file).unwrap(); + filetime::set_file_times(file, atime, mtime).unwrap(); + } + } + "#) + .file("src/lib.rs", r#" + mod not_incl; + mod incl; + "#) + .file("src/mod.md", r#" + (The content of this file does not matter since we are not doing real codegen.) + "#) + }).unwrap(); + + println!("build 1: all is new"); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] filetime [..] 
+[DOWNLOADING] libc [..] +[COMPILING] libc [..] +[RUNNING] `rustc --crate-name libc [..]` +[COMPILING] filetime [..] +[RUNNING] `rustc --crate-name filetime [..]` +[COMPILING] reduction [..] +[RUNNING] `rustc --crate-name build_script_tango_build tango-build.rs --crate-type bin [..]` +[RUNNING] `[..][/]build-script-tango-build` +[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + println!("build 2: nothing changed; file timestamps reset by build script"); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[FRESH] libc [..] +[FRESH] filetime [..] +[FRESH] reduction [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + println!("build 3: touch `src/not_incl.rs`; expect build script *not* re-run"); + sleep_ms(1000); + File::create(p.root().join("src").join("not_incl.rs")).unwrap(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[FRESH] libc [..] +[FRESH] filetime [..] +[COMPILING] reduction [..] +[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + // This final case models the bug from rust-lang/cargo#4135: an + // explicitly included file should cause a build-script re-run, + // even if that same file is matched by `.gitignore`. + println!("build 4: touch `src/incl.rs`; expect build script re-run"); + sleep_ms(1000); + File::create(p.root().join("src").join("incl.rs")).unwrap(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[FRESH] libc [..] +[FRESH] filetime [..] +[COMPILING] reduction [..] +[RUNNING] `[..][/]build-script-tango-build` +[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn invalid_git_dependency_manifest() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + categories = ["algorithms"] + categories = ["algorithms"] + + [lib] + + name = "dep1" + "#) + .file("src/dep1.rs", r#" + pub fn hello() -> &'static str { + "hello world" + } + "#) + }).unwrap(); + + let project = project + .file("Cargo.toml", &format!(r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, git_project.url())) + .file("src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])) + .build(); + + let git_root = git_project.root(); + + assert_that(project.cargo("build"), + execs() + .with_stderr(&format!("[UPDATING] git repository `{}`\n\ + error: failed to load source for a dependency on `dep1`\n\ + \n\ + Caused by:\n \ + Unable to update {}\n\ + \n\ + Caused by:\n \ + failed to parse manifest at `[..]`\n\ + \n\ + Caused by:\n \ + could not parse input as TOML\n\ + \n\ + Caused by:\n \ + duplicate key: `categories` for key `project`", + path2url(git_root.clone()), + path2url(git_root), + ))); +} diff --git a/collector/compile-benchmarks/cargo/tests/init.rs b/collector/compile-benchmarks/cargo/tests/init.rs new file mode 100644 index 000000000..47efa3b97 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/init.rs @@ -0,0 +1,433 @@ +extern crate cargotest; +extern crate cargo; +extern crate tempdir; +extern crate hamcrest; + +use std::fs::{self, File}; +use std::io::prelude::*; +use std::env; + +use cargo::util::ProcessBuilder; +use cargotest::support::{execs, paths, cargo_exe}; +use hamcrest::{assert_that, existing_file, existing_dir, is_not}; +use tempdir::TempDir; + +fn cargo_process(s: &str) -> ProcessBuilder { + let mut p = cargotest::process(&cargo_exe()); + p.arg(s).cwd(&paths::root()).env("HOME", &paths::home()); + p +} + +#[test] +fn simple_lib() { + assert_that(cargo_process("init").arg("--lib").arg("--vcs").arg("none") + .env("USER", "foo"), + execs().with_status(0).with_stderr("\ +[CREATED] library project +")); + + assert_that(&paths::root().join("Cargo.toml"), existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".gitignore"), is_not(existing_file())); + + assert_that(cargo_process("build"), + execs().with_status(0)); +} + +#[test] +fn simple_bin() { + let path = paths::root().join("foo"); + fs::create_dir(&path).unwrap(); + assert_that(cargo_process("init").arg("--bin").arg("--vcs").arg("none") + .env("USER", "foo").cwd(&path), + execs().with_status(0).with_stderr("\ +[CREATED] binary (application) project +")); + + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that(&paths::root().join("foo/src/main.rs"), existing_file()); + + assert_that(cargo_process("build").cwd(&path), + execs().with_status(0)); + assert_that(&paths::root().join(&format!("foo/target/debug/foo{}", + env::consts::EXE_SUFFIX)), + existing_file()); +} + +#[test] +fn both_lib_and_bin() { + let td = TempDir::new("cargo").unwrap(); + assert_that(cargo_process("init").arg("--lib").arg("--bin").cwd(td.path()) + .env("USER", "foo"), + execs().with_status(101).with_stderr( + "[ERROR] can't specify both lib and binary outputs")); +} + +fn bin_already_exists(explicit: bool, rellocation: &str) { + let path = paths::root().join("foo"); + 
fs::create_dir_all(&path.join("src")).unwrap(); + + let sourcefile_path = path.join(rellocation); + + let content = br#" + fn main() { + println!("Hello, world 2!"); + } + "#; + + File::create(&sourcefile_path).unwrap().write_all(content).unwrap(); + + if explicit { + assert_that(cargo_process("init").arg("--bin").arg("--vcs").arg("none") + .env("USER", "foo").cwd(&path), + execs().with_status(0)); + } else { + assert_that(cargo_process("init").arg("--vcs").arg("none") + .env("USER", "foo").cwd(&path), + execs().with_status(0)); + } + + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that(&paths::root().join("foo/src/lib.rs"), is_not(existing_file())); + + // Check that our file is not overwritten + let mut new_content = Vec::new(); + File::open(&sourcefile_path).unwrap().read_to_end(&mut new_content).unwrap(); + assert_eq!(Vec::from(content as &[u8]), new_content); +} + +#[test] +fn bin_already_exists_explicit() { + bin_already_exists(true, "src/main.rs") +} + +#[test] +fn bin_already_exists_implicit() { + bin_already_exists(false, "src/main.rs") +} + +#[test] +fn bin_already_exists_explicit_nosrc() { + bin_already_exists(true, "main.rs") +} + +#[test] +fn bin_already_exists_implicit_nosrc() { + bin_already_exists(false, "main.rs") +} + +#[test] +fn bin_already_exists_implicit_namenosrc() { + bin_already_exists(false, "foo.rs") +} + +#[test] +fn bin_already_exists_implicit_namesrc() { + bin_already_exists(false, "src/foo.rs") +} + +#[test] +fn confused_by_multiple_lib_files() { + let path = paths::root().join("foo"); + fs::create_dir_all(&path.join("src")).unwrap(); + + let sourcefile_path1 = path.join("src/lib.rs"); + + File::create(&sourcefile_path1).unwrap().write_all(br#" + fn qqq () { + println!("Hello, world 2!"); + } + "#).unwrap(); + + let sourcefile_path2 = path.join("lib.rs"); + + File::create(&sourcefile_path2).unwrap().write_all(br#" + fn qqq () { + println!("Hello, world 3!"); + } + "#).unwrap(); + + assert_that(cargo_process("init").arg("--vcs").arg("none") + .env("USER", "foo").cwd(&path), + execs().with_status(101).with_stderr("\ +[ERROR] cannot have a project with multiple libraries, found both `src/lib.rs` and `lib.rs` +")); + + assert_that(&paths::root().join("foo/Cargo.toml"), is_not(existing_file())); +} + + +#[test] +fn multibin_project_name_clash() { + let path = paths::root().join("foo"); + fs::create_dir(&path).unwrap(); + + let sourcefile_path1 = path.join("foo.rs"); + + File::create(&sourcefile_path1).unwrap().write_all(br#" + fn main () { + println!("Hello, world 2!"); + } + "#).unwrap(); + + let sourcefile_path2 = path.join("main.rs"); + + File::create(&sourcefile_path2).unwrap().write_all(br#" + fn main () { + println!("Hello, world 3!"); + } + "#).unwrap(); + + assert_that(cargo_process("init").arg("--lib").arg("--vcs").arg("none") + .env("USER", "foo").cwd(&path), + execs().with_status(101).with_stderr("\ +[ERROR] multiple possible binary sources found: + main.rs + foo.rs +cannot automatically generate Cargo.toml as the main target would be ambiguous +")); + + assert_that(&paths::root().join("foo/Cargo.toml"), is_not(existing_file())); +} + +fn lib_already_exists(rellocation: &str) { + let path = paths::root().join("foo"); + fs::create_dir_all(&path.join("src")).unwrap(); + + let sourcefile_path = path.join(rellocation); + + let content = br#" + pub fn qqq() {} + "#; + + File::create(&sourcefile_path).unwrap().write_all(content).unwrap(); + + assert_that(cargo_process("init").arg("--vcs").arg("none") + .env("USER", 
"foo").cwd(&path), + execs().with_status(0)); + + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that(&paths::root().join("foo/src/main.rs"), is_not(existing_file())); + + // Check that our file is not overwritten + let mut new_content = Vec::new(); + File::open(&sourcefile_path).unwrap().read_to_end(&mut new_content).unwrap(); + assert_eq!(Vec::from(content as &[u8]), new_content); +} + +#[test] +fn lib_already_exists_src() { + lib_already_exists("src/lib.rs") +} + +#[test] +fn lib_already_exists_nosrc() { + lib_already_exists("lib.rs") +} + +#[test] +fn simple_git() { + assert_that(cargo_process("init").arg("--lib") + .arg("--vcs") + .arg("git") + .env("USER", "foo"), + execs().with_status(0)); + + assert_that(&paths::root().join("Cargo.toml"), existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".git"), existing_dir()); + assert_that(&paths::root().join(".gitignore"), existing_file()); +} + +#[test] +fn auto_git() { + let td = TempDir::new("cargo").unwrap(); + let foo = &td.path().join("foo"); + fs::create_dir_all(&foo).unwrap(); + assert_that(cargo_process("init").arg("--lib") + .cwd(foo.clone()) + .env("USER", "foo"), + execs().with_status(0)); + + assert_that(&foo.join("Cargo.toml"), existing_file()); + assert_that(&foo.join("src/lib.rs"), existing_file()); + assert_that(&foo.join(".git"), existing_dir()); + assert_that(&foo.join(".gitignore"), existing_file()); +} + +#[test] +fn invalid_dir_name() { + let foo = &paths::root().join("foo.bar"); + fs::create_dir_all(&foo).unwrap(); + assert_that(cargo_process("init").cwd(foo.clone()) + .env("USER", "foo"), + execs().with_status(101).with_stderr("\ +[ERROR] Invalid character `.` in crate name: `foo.bar` +use --name to override crate name +")); + + assert_that(&foo.join("Cargo.toml"), is_not(existing_file())); +} + +#[test] +fn reserved_name() { + let test = &paths::root().join("test"); + fs::create_dir_all(&test).unwrap(); + assert_that(cargo_process("init").cwd(test.clone()) + .env("USER", "foo"), + execs().with_status(101).with_stderr("\ +[ERROR] The name `test` cannot be used as a crate name\n\ +use --name to override crate name +")); + + assert_that(&test.join("Cargo.toml"), is_not(existing_file())); +} + +#[test] +fn git_autodetect() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + assert_that(cargo_process("init").arg("--lib") + .env("USER", "foo"), + execs().with_status(0)); + + + assert_that(&paths::root().join("Cargo.toml"), existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".git"), existing_dir()); + assert_that(&paths::root().join(".gitignore"), existing_file()); +} + + +#[test] +fn mercurial_autodetect() { + fs::create_dir(&paths::root().join(".hg")).unwrap(); + + assert_that(cargo_process("init").arg("--lib") + .env("USER", "foo"), + execs().with_status(0)); + + + assert_that(&paths::root().join("Cargo.toml"), existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".git"), is_not(existing_dir())); + assert_that(&paths::root().join(".hgignore"), existing_file()); +} + +#[test] +fn gitignore_appended_not_replaced() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join(".gitignore")).unwrap().write_all(b"qqqqqq\n").unwrap(); + + assert_that(cargo_process("init").arg("--lib") + .env("USER", "foo"), + execs().with_status(0)); + + + 
assert_that(&paths::root().join("Cargo.toml"), existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".git"), existing_dir()); + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap(); + assert!(contents.contains(r#"qqqqqq"#)); +} + +#[test] +fn cargo_lock_gitignored_if_lib1() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + assert_that(cargo_process("init").arg("--lib").arg("--vcs").arg("git") + .env("USER", "foo"), + execs().with_status(0)); + + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap(); + assert!(contents.contains(r#"Cargo.lock"#)); +} + +#[test] +fn cargo_lock_gitignored_if_lib2() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join("lib.rs")).unwrap().write_all(br#""#).unwrap(); + + assert_that(cargo_process("init").arg("--vcs").arg("git") + .env("USER", "foo"), + execs().with_status(0)); + + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap(); + assert!(contents.contains(r#"Cargo.lock"#)); +} + +#[test] +fn cargo_lock_not_gitignored_if_bin1() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + assert_that(cargo_process("init").arg("--vcs").arg("git") + .arg("--bin") + .env("USER", "foo"), + execs().with_status(0)); + + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap(); + assert!(!contents.contains(r#"Cargo.lock"#)); +} + +#[test] +fn cargo_lock_not_gitignored_if_bin2() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join("main.rs")).unwrap().write_all(br#""#).unwrap(); + + assert_that(cargo_process("init").arg("--vcs").arg("git") + .env("USER", "foo"), + execs().with_status(0)); + + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap(); + assert!(!contents.contains(r#"Cargo.lock"#)); +} + +#[test] +fn with_argument() { + assert_that(cargo_process("init").arg("foo").arg("--vcs").arg("none") + .env("USER", "foo"), + execs().with_status(0)); + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); +} + + +#[test] +fn unknown_flags() { + assert_that(cargo_process("init").arg("foo").arg("--flag"), + execs().with_status(1) + .with_stderr("\ +[ERROR] Unknown flag: '--flag' + +Usage: + cargo init [options] [] + cargo init -h | --help +")); +} + +#[cfg(not(windows))] +#[test] +fn no_filename() { + assert_that(cargo_process("init").arg("/"), + execs().with_status(101) + .with_stderr("\ +[ERROR] cannot auto-detect project name from path \"/\" ; use --name to override +".to_string())); +} diff --git a/collector/compile-benchmarks/cargo/tests/install.rs b/collector/compile-benchmarks/cargo/tests/install.rs new file mode 100644 index 000000000..d472a324b --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/install.rs @@ -0,0 +1,943 @@ +extern crate cargo; +extern crate cargotest; 
+extern crate hamcrest; + +use std::fs::{self, File, OpenOptions}; +use std::io::prelude::*; + +use cargo::util::ProcessBuilder; +use cargotest::install::{cargo_home, has_installed_exe}; +use cargotest::support::git; +use cargotest::support::paths; +use cargotest::support::registry::Package; +use cargotest::support::{project, execs}; +use hamcrest::{assert_that, is_not}; + +fn cargo_process(s: &str) -> ProcessBuilder { + let mut p = cargotest::cargo_process(); + p.arg(s); + p +} + +fn pkg(name: &str, vers: &str) { + Package::new(name, vers) + .file("src/lib.rs", "") + .file("src/main.rs", &format!(" + extern crate {}; + fn main() {{}} + ", name)) + .publish(); +} + +#[test] +fn simple() { + pkg("foo", "0.0.1"); + + assert_that(cargo_process("install").arg("foo"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.0.1 (registry [..]) +[INSTALLING] foo v0.0.1 +[COMPILING] foo v0.0.1 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display()))); + assert_that(cargo_home(), has_installed_exe("foo")); + + assert_that(cargo_process("uninstall").arg("foo"), + execs().with_status(0).with_stderr(&format!("\ +[REMOVING] {home}[..]bin[..]foo[..] +", + home = cargo_home().display()))); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); +} + +#[test] +fn multiple_pkgs() { + pkg("foo", "0.0.1"); + pkg("bar", "0.0.2"); + + assert_that(cargo_process("install").args(&["foo", "bar", "baz"]), + execs().with_status(101).with_stderr(&format!("\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.0.1 (registry file://[..]) +[INSTALLING] foo v0.0.1 +[COMPILING] foo v0.0.1 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo[..] +[DOWNLOADING] bar v0.0.2 (registry file://[..]) +[INSTALLING] bar v0.0.2 +[COMPILING] bar v0.0.2 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]bar[..] +error: could not find `baz` in `registry [..]` + +Summary: Successfully installed foo, bar! Failed to install baz (see error(s) above). +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +error: some crates failed to install +", + home = cargo_home().display()))); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("bar")); + + assert_that(cargo_process("uninstall").arg("foo"), + execs().with_status(0).with_stderr(&format!("\ +[REMOVING] {home}[..]bin[..]foo[..] +", + home = cargo_home().display()))); + assert_that(cargo_process("uninstall").arg("bar"), + execs().with_status(0).with_stderr(&format!("\ +[REMOVING] {home}[..]bin[..]bar[..] +", + home = cargo_home().display()))); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); + assert_that(cargo_home(), is_not(has_installed_exe("bar"))); +} + +#[test] +fn pick_max_version() { + pkg("foo", "0.0.1"); + pkg("foo", "0.0.2"); + + assert_that(cargo_process("install").arg("foo"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.0.2 (registry [..]) +[INSTALLING] foo v0.0.2 +[COMPILING] foo v0.0.2 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo[..] 
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display()))); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn missing() { + pkg("foo", "0.0.1"); + assert_that(cargo_process("install").arg("bar"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[ERROR] could not find `bar` in `registry [..]` +")); +} + +#[test] +fn bad_version() { + pkg("foo", "0.0.1"); + assert_that(cargo_process("install").arg("foo").arg("--vers=0.2.0"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[ERROR] could not find `foo` in `registry [..]` with version `=0.2.0` +")); +} + +#[test] +fn no_crate() { + assert_that(cargo_process("install"), + execs().with_status(101).with_stderr("\ +[ERROR] `[..]` is not a crate root; specify a crate to install [..] + +Caused by: + failed to read `[..]Cargo.toml` + +Caused by: + [..] (os error [..]) +")); +} + +#[test] +fn install_location_precedence() { + pkg("foo", "0.0.1"); + + let root = paths::root(); + let t1 = root.join("t1"); + let t2 = root.join("t2"); + let t3 = root.join("t3"); + let t4 = cargo_home(); + + fs::create_dir(root.join(".cargo")).unwrap(); + File::create(root.join(".cargo/config")).unwrap().write_all(format!("\ + [install] + root = '{}' + ", t3.display()).as_bytes()).unwrap(); + + println!("install --root"); + + assert_that(cargo_process("install").arg("foo") + .arg("--root").arg(&t1) + .env("CARGO_INSTALL_ROOT", &t2), + execs().with_status(0)); + assert_that(&t1, has_installed_exe("foo")); + assert_that(&t2, is_not(has_installed_exe("foo"))); + + println!("install CARGO_INSTALL_ROOT"); + + assert_that(cargo_process("install").arg("foo") + .env("CARGO_INSTALL_ROOT", &t2), + execs().with_status(0)); + assert_that(&t2, has_installed_exe("foo")); + assert_that(&t3, is_not(has_installed_exe("foo"))); + + println!("install install.root"); + + assert_that(cargo_process("install").arg("foo"), + execs().with_status(0)); + assert_that(&t3, has_installed_exe("foo")); + assert_that(&t4, is_not(has_installed_exe("foo"))); + + fs::remove_file(root.join(".cargo/config")).unwrap(); + + println!("install cargo home"); + + assert_that(cargo_process("install").arg("foo"), + execs().with_status(0)); + assert_that(&t4, has_installed_exe("foo")); +} + +#[test] +fn install_path() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_process("install").arg("--path").arg(".").cwd(p.root()), + execs().with_status(101).with_stderr("\ +[INSTALLING] foo v0.1.0 [..] +[ERROR] binary `foo[..]` already exists in destination as part of `foo v0.1.0 [..]` +Add --force to overwrite +")); +} + +#[test] +fn multiple_crates_error() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("a/src/main.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()), + execs().with_status(101).with_stderr("\ +[UPDATING] git repository [..] 
+[ERROR] multiple packages with binaries found: bar, foo +")); +} + +#[test] +fn multiple_crates_select() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("a/src/main.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()) + .arg("foo"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), is_not(has_installed_exe("bar"))); + + assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()) + .arg("bar"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("bar")); +} + +#[test] +fn multiple_crates_auto_binaries() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#) + .file("src/main.rs", "extern crate bar; fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn multiple_crates_auto_examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#) + .file("src/lib.rs", "extern crate bar;") + .file("examples/foo.rs", " + extern crate bar; + extern crate foo; + fn main() {} + ") + .file("a/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()) + .arg("--example=foo"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn no_binaries_or_examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(101).with_stderr("\ +[ERROR] no packages found with binaries or examples +")); +} + +#[test] +fn no_binaries() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file("examples/foo.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()).arg("foo"), + execs().with_status(101).with_stderr("\ +[INSTALLING] foo [..] 
+[ERROR] specified package has no binaries +")); +} + +#[test] +fn examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file("examples/foo.rs", "extern crate foo; fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()) + .arg("--example=foo"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn install_twice() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0)); + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(101).with_stderr("\ +[INSTALLING] foo v0.1.0 [..] +[ERROR] binary `foo-bin1[..]` already exists in destination as part of `foo v0.1.0 ([..])` +binary `foo-bin2[..]` already exists in destination as part of `foo v0.1.0 ([..])` +Add --force to overwrite +")); +} + +#[test] +fn install_force() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0)); + + let p = project("foo2") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--force").arg("--path").arg(p.root()), + execs().with_status(0).with_stderr(&format!("\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] {home}[..]bin[..]foo[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display()))); + + assert_that(cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout("\ +foo v0.2.0 ([..]): + foo[..] +")); +} + +#[test] +fn install_force_partial_overlap() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0)); + + let p = project("foo2") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("src/bin/foo-bin2.rs", "fn main() {}") + .file("src/bin/foo-bin3.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--force").arg("--path").arg(p.root()), + execs().with_status(0).with_stderr(&format!("\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo-bin3[..] +[REPLACING] {home}[..]bin[..]foo-bin2[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display()))); + + assert_that(cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout("\ +foo v0.1.0 ([..]): + foo-bin1[..] +foo v0.2.0 ([..]): + foo-bin2[..] + foo-bin3[..] 
+")); +} + +#[test] +fn install_force_bin() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0)); + + let p = project("foo2") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--force") + .arg("--bin") + .arg("foo-bin2") + .arg("--path") + .arg(p.root()), + execs().with_status(0).with_stderr(&format!("\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] {home}[..]bin[..]foo-bin2[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display()))); + + assert_that(cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout("\ +foo v0.1.0 ([..]): + foo-bin1[..] +foo v0.2.0 ([..]): + foo-bin2[..] +")); +} + +#[test] +fn compile_failure() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(101).with_stderr_contains("\ +[ERROR] failed to compile `foo v0.1.0 ([..])`, intermediate artifacts can be \ + found at `[..]target` + +Caused by: + Could not compile `foo`. + +To learn more, run the command again with --verbose. +")); +} + +#[test] +fn git_repo() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + // use `--locked` to test that we don't even try to write a lockfile + assert_that(cargo_process("install").arg("--locked").arg("--git").arg(p.url().to_string()), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] git repository `[..]` +[INSTALLING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display()))); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn list() { + pkg("foo", "0.0.1"); + pkg("bar", "0.2.1"); + pkg("bar", "0.2.2"); + + assert_that(cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout("")); + + assert_that(cargo_process("install").arg("bar").arg("--vers").arg("=0.2.1"), + execs().with_status(0)); + assert_that(cargo_process("install").arg("foo"), + execs().with_status(0)); + assert_that(cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout("\ +bar v0.2.1: + bar[..] +foo v0.0.1: + foo[..] +")); +} + +#[test] +fn list_error() { + pkg("foo", "0.0.1"); + assert_that(cargo_process("install").arg("foo"), + execs().with_status(0)); + assert_that(cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout("\ +foo v0.0.1: + foo[..] 
+")); + let mut worldfile_path = cargo_home(); + worldfile_path.push(".crates.toml"); + let mut worldfile = OpenOptions::new() + .write(true) + .open(worldfile_path) + .expect(".crates.toml should be there"); + worldfile.write_all(b"\x00").unwrap(); + drop(worldfile); + assert_that(cargo_process("install").arg("--list").arg("--verbose"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse crate metadata at `[..]` + +Caused by: + invalid TOML found for metadata + +Caused by: + unexpected character[..] +")); +} + +#[test] +fn uninstall_pkg_does_not_exist() { + assert_that(cargo_process("uninstall").arg("foo"), + execs().with_status(101).with_stderr("\ +[ERROR] package id specification `foo` matched no packages +")); +} + +#[test] +fn uninstall_bin_does_not_exist() { + pkg("foo", "0.0.1"); + + assert_that(cargo_process("install").arg("foo"), + execs().with_status(0)); + assert_that(cargo_process("uninstall").arg("foo").arg("--bin=bar"), + execs().with_status(101).with_stderr("\ +[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1` +")); +} + +#[test] +fn uninstall_piecemeal() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/bin/foo.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("bar")); + + assert_that(cargo_process("uninstall").arg("foo").arg("--bin=bar"), + execs().with_status(0).with_stderr("\ +[REMOVING] [..]bar[..] +")); + + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), is_not(has_installed_exe("bar"))); + + assert_that(cargo_process("uninstall").arg("foo").arg("--bin=foo"), + execs().with_status(0).with_stderr("\ +[REMOVING] [..]foo[..] +")); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); + + assert_that(cargo_process("uninstall").arg("foo"), + execs().with_status(101).with_stderr("\ +[ERROR] package id specification `foo` matched no packages +")); +} + +#[test] +fn subcommand_works_out_of_the_box() { + Package::new("cargo-foo", "1.0.0") + .file("src/main.rs", r#" + fn main() { + println!("bar"); + } + "#) + .publish(); + assert_that(cargo_process("install").arg("cargo-foo"), + execs().with_status(0)); + assert_that(cargo_process("foo"), + execs().with_status(0).with_stdout("bar\n")); + assert_that(cargo_process("--list"), + execs().with_status(0).with_stdout_contains(" foo\n")); +} + +#[test] +fn installs_from_cwd_by_default() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").cwd(p.root()), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn do_not_rebuilds_on_local_install() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--release"), + execs().with_status(0)); + assert_that(cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0).with_stderr("[INSTALLING] [..] +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [..] 
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +")); + + assert!(p.build_dir().exists()); + assert!(p.release_bin("foo").exists()); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn reports_unsuccessful_subcommand_result() { + Package::new("cargo-fail", "1.0.0") + .file("src/main.rs", r#" + fn main() { + panic!(); + } + "#) + .publish(); + assert_that(cargo_process("install").arg("cargo-fail"), + execs().with_status(0)); + assert_that(cargo_process("--list"), + execs().with_status(0).with_stdout_contains(" fail\n")); + assert_that(cargo_process("fail"), + execs().with_status(101).with_stderr_contains("\ +thread '[..]' panicked at 'explicit panic', [..] +")); +} + +#[test] +fn git_with_lockfile() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/lib.rs", "fn main() {}") + .file("Cargo.lock", r#" + [[package]] + name = "foo" + version = "0.1.0" + dependencies = [ "bar 0.1.0" ] + + [[package]] + name = "bar" + version = "0.1.0" + "#) + .build(); + + assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()), + execs().with_status(0)); +} + +#[test] +fn q_silences_warnings() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(cargo_process("install").arg("-q").arg("--path").arg(p.root()), + execs().with_status(0).with_stderr("")); +} + +#[test] +fn readonly_dir() { + pkg("foo", "0.0.1"); + + let root = paths::root(); + let dir = &root.join("readonly"); + fs::create_dir(root.join("readonly")).unwrap(); + let mut perms = fs::metadata(dir).unwrap().permissions(); + perms.set_readonly(true); + fs::set_permissions(dir, perms).unwrap(); + + assert_that(cargo_process("install").arg("foo").cwd(dir), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn use_path_workspace() { + Package::new("foo", "1.0.0").publish(); + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [workspace] + members = ["baz"] + "#) + .file("src/main.rs", "fn main() {}") + .file("baz/Cargo.toml", r#" + [package] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "1" + "#) + .file("baz/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + let lock = p.read_lockfile(); + assert_that(p.cargo("install"), execs().with_status(0)); + let lock2 = p.read_lockfile(); + assert!(lock == lock2, "different lockfiles"); +} + +#[test] +fn vers_precise() { + pkg("foo", "0.1.1"); + pkg("foo", "0.1.2"); + + assert_that(cargo_process("install").arg("foo").arg("--vers").arg("0.1.1"), + execs().with_status(0).with_stderr_contains("\ +[DOWNLOADING] foo v0.1.1 (registry [..]) +")); +} + +#[test] +fn legacy_version_requirement() { + pkg("foo", "0.1.1"); + + assert_that(cargo_process("install").arg("foo").arg("--vers").arg("0.1"), + execs().with_status(0).with_stderr_contains("\ +warning: the `--vers` provided, `0.1`, is not a valid semver version + +historically Cargo treated this as a semver version requirement accidentally +and will continue to do so, but this behavior will be removed 
eventually
+"));
+}
+
+#[test]
+fn test_install_git_cannot_be_a_base_url() {
+    assert_that(cargo_process("install").arg("--git").arg("github.com:rust-lang-nursery/rustfmt.git"),
+                execs().with_status(101).with_stderr("\
+error: invalid url `github.com:rust-lang-nursery/rustfmt.git`: cannot-be-a-base-URLs are not supported
+"));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/jobserver.rs b/collector/compile-benchmarks/cargo/tests/jobserver.rs
new file mode 100644
index 000000000..6803f3fbe
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/jobserver.rs
@@ -0,0 +1,183 @@
+extern crate cargotest;
+extern crate hamcrest;
+
+use std::net::TcpListener;
+use std::thread;
+use std::process::Command;
+
+use cargotest::support::{project, execs, cargo_exe};
+use hamcrest::assert_that;
+
+#[test]
+fn jobserver_exists() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("build.rs", r#"
+            use std::env;
+
+            fn main() {
+                let var = env::var("CARGO_MAKEFLAGS").unwrap();
+                let arg = var.split(' ')
+                             .find(|p| p.starts_with("--jobserver"))
+                             .unwrap();
+                let val = &arg[arg.find('=').unwrap() + 1..];
+                validate(val);
+            }
+
+            #[cfg(unix)]
+            fn validate(s: &str) {
+                use std::fs::File;
+                use std::io::*;
+                use std::os::unix::prelude::*;
+
+                let fds = s.split(',').collect::<Vec<&str>>();
+                println!("{}", s);
+                assert_eq!(fds.len(), 2);
+                unsafe {
+                    let mut read = File::from_raw_fd(fds[0].parse().unwrap());
+                    let mut write = File::from_raw_fd(fds[1].parse().unwrap());
+
+                    let mut buf = [0];
+                    assert_eq!(read.read(&mut buf).unwrap(), 1);
+                    assert_eq!(write.write(&buf).unwrap(), 1);
+                }
+            }
+
+            #[cfg(windows)]
+            fn validate(_: &str) {
+                // a little too complicated for a test...
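+                // (On Windows, make exposes the jobserver as a named
+                // semaphore rather than a pipe fd pair, so a faithful
+                // check here would need WinAPI bindings.)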
+ } + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn makes_jobserver_used() { + let make = if cfg!(windows) {"mingw32-make"} else {"make"}; + if Command::new(make).arg("--version").output().is_err() { + return + } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + d1 = { path = "d1" } + d2 = { path = "d2" } + d3 = { path = "d3" } + "#) + .file("src/lib.rs", "") + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#) + .file("d1/src/lib.rs", "") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#) + .file("d2/src/lib.rs", "") + .file("d3/Cargo.toml", r#" + [package] + name = "d3" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#) + .file("d3/src/lib.rs", "") + .file("dbuild.rs", r#" + use std::net::TcpStream; + use std::env; + use std::io::Read; + + fn main() { + let addr = env::var("ADDR").unwrap(); + let mut stream = TcpStream::connect(addr).unwrap(); + let mut v = Vec::new(); + stream.read_to_end(&mut v).unwrap(); + } + "#) + .file("Makefile", "\ +all: +\t+$(CARGO) build +") + .build(); + + let l = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = l.local_addr().unwrap(); + + let child = thread::spawn(move || { + let a1 = l.accept().unwrap(); + let a2 = l.accept().unwrap(); + l.set_nonblocking(true).unwrap(); + + for _ in 0..1000 { + assert!(l.accept().is_err()); + thread::yield_now(); + } + + drop(a1); + l.set_nonblocking(false).unwrap(); + let a3 = l.accept().unwrap(); + + drop((a2, a3)); + }); + + assert_that(p.process(make) + .env("CARGO", cargo_exe()) + .env("ADDR", addr.to_string()) + .arg("-j2"), + execs().with_status(0)); + child.join().unwrap(); +} + +#[test] +fn jobserver_and_j() { + let make = if cfg!(windows) {"mingw32-make"} else {"make"}; + if Command::new(make).arg("--version").output().is_err() { + return + } + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("Makefile", "\ +all: +\t+$(CARGO) build -j2 +") + .build(); + + assert_that(p.process(make) + .env("CARGO", cargo_exe()) + .arg("-j2"), + execs().with_status(0).with_stderr("\ +warning: a `-j` argument was passed to Cargo but Cargo is also configured \ +with an external jobserver in its environment, ignoring the `-j` parameter +[COMPILING] [..] +[FINISHED] [..] 
+")); +} diff --git a/collector/compile-benchmarks/cargo/tests/local-registry.rs b/collector/compile-benchmarks/cargo/tests/local-registry.rs new file mode 100644 index 000000000..1fb466e6a --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/local-registry.rs @@ -0,0 +1,404 @@ +#[macro_use] +extern crate cargotest; +extern crate hamcrest; + +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::registry::Package; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +fn setup() { + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all(br#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + local-registry = 'registry' + "#)); +} + +#[test] +fn simple() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.0.1" + "#) + .file("src/lib.rs", r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] foo v0.0.1 ([..]) +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url()))); + assert_that(p.cargo("build"), execs().with_status(0).with_stderr("\ +[FINISHED] [..] +")); + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn multiple_versions() { + setup(); + Package::new("foo", "0.0.1").local(true).publish(); + Package::new("foo", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url()))); + + Package::new("foo", "0.2.0") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + assert_that(p.cargo("update").arg("-v"), + execs().with_status(0).with_stderr("\ +[UPDATING] foo v0.1.0 -> v0.2.0 +")); +} + +#[test] +fn multiple_names() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] 
+", + dir = p.url()))); +} + +#[test] +fn interdependent() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .dep("foo", "*") + .file("src/lib.rs", "extern crate foo; pub fn bar() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.1.0 +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url()))); +} + +#[test] +fn path_dep_rewritten() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .dep("foo", "*") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", version = "*" } + "#) + .file("src/lib.rs", "extern crate foo; pub fn bar() {}") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("foo/src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.1.0 +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url()))); +} + +#[test] +fn invalid_dir_bad() { + setup(); + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-directory' + + [source.my-awesome-local-directory] + local-registry = '/path/to/nowhere' + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to load source for a dependency on `foo` + +Caused by: + Unable to update registry https://[..] + +Caused by: + failed to update replaced source `registry https://[..]` + +Caused by: + local registry path is not a directory: [..]path[..]to[..]nowhere +")); +} + +#[test] +fn different_directory_replacing_the_registry_is_bad() { + setup(); + + // Move our test's .cargo/config to a temporary location and publish a + // registry package we're going to use first. 
+ let config = paths::root().join(".cargo"); + let config_tmp = paths::root().join(".cargo-old"); + t!(fs::rename(&config, &config_tmp)); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", "") + .build(); + + // Generate a lock file against the crates.io registry + Package::new("foo", "0.0.1").publish(); + assert_that(p.cargo("build"), execs().with_status(0)); + + // Switch back to our directory source, and now that we're replacing + // crates.io make sure that this fails because we're replacing with a + // different checksum + config.rm_rf(); + t!(fs::rename(&config_tmp, &config)); + Package::new("foo", "0.0.1") + .file("src/lib.rs", "invalid") + .local(true) + .publish(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] checksum for `foo v0.0.1` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.0.1` is the same as when the lockfile was generated + +")); +} + +#[test] +fn crates_io_registry_url_is_optional() { + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all(br#" + [source.crates-io] + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + local-registry = 'registry' + "#)); + + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.0.1" + "#) + .file("src/lib.rs", r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] foo v0.0.1 ([..]) +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url()))); + assert_that(p.cargo("build"), execs().with_status(0).with_stderr("\ +[FINISHED] [..] 
+")); + assert_that(p.cargo("test"), execs().with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/lockfile-compat.rs b/collector/compile-benchmarks/cargo/tests/lockfile-compat.rs new file mode 100644 index 000000000..0c90fd417 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/lockfile-compat.rs @@ -0,0 +1,451 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::git; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project, lines_match}; +use hamcrest::assert_that; + +#[test] +fn oldest_lockfile_still_works() { + let cargo_commands = vec![ + "build", + "update" + ]; + for cargo_command in cargo_commands { + oldest_lockfile_still_works_with_command(cargo_command); + } +} + +fn oldest_lockfile_still_works_with_command(cargo_command: &str) { + Package::new("foo", "0.1.0").publish(); + + let expected_lockfile = +r#"[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "zzz" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "[..]" +"#; + + let old_lockfile = +r#"[root] +name = "zzz" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +"#; + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "zzz" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("Cargo.lock", old_lockfile) + .build(); + + assert_that(p.cargo(cargo_command), + execs().with_status(0)); + + let lock = p.read_lockfile(); + for (l, r) in expected_lockfile.lines().zip(lock.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(lock.lines().count(), expected_lockfile.lines().count()); +} + + +#[test] +fn frozen_flag_preserves_old_lockfile() { + Package::new("foo", "0.1.0").publish(); + + let old_lockfile = + r#"[root] +name = "zzz" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f9e0a16bdf5c05435698fa27192d89e331b22a26a972c34984f560662544453b" +"#; + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "zzz" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("Cargo.lock", old_lockfile) + .build(); + + assert_that(p.cargo("build").arg("--locked"), + execs().with_status(0)); + + let lock = p.read_lockfile(); + for (l, r) in old_lockfile.lines().zip(lock.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(lock.lines().count(), old_lockfile.lines().count()); +} + + +#[test] +fn totally_wild_checksums_works() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + 
.file("src/lib.rs", "") + .file("Cargo.lock", r#" +[[package]] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"checksum foo 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#); + + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let lock = p.read_lockfile(); + assert!(lock.starts_with(r#" +[[package]] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"#.trim())); +} + +#[test] +fn wrong_checksum_is_an_error() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("Cargo.lock", r#" +[[package]] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#); + + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry `[..]` +error: checksum for `foo v0.1.0` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.1.0` is the same as when the lockfile was generated + +")); +} + +// If the checksum is unlisted in the lockfile (e.g. ) yet we can +// calculate it (e.g. it's a registry dep), then we should in theory just fill +// it in. 
+#[test]
+fn unlisted_checksum_is_bad_if_we_calculate() {
+    Package::new("foo", "0.1.0").publish();
+
+    let p = project("bar")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+        "#)
+        .file("src/lib.rs", "")
+        .file("Cargo.lock", r#"
+[[package]]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>"
+"#);
+    let p = p.build();
+
+    assert_that(p.cargo("fetch"),
+                execs().with_status(101).with_stderr("\
+[UPDATING] registry `[..]`
+error: checksum for `foo v0.1.0` was not previously calculated, but a checksum \
+could now be calculated
+
+this could be indicative of a few possible situations:
+
+    * the source `[..]` did not previously support checksums,
+      but was replaced with one that does
+    * newer Cargo implementations know how to checksum this source, but this
+      older implementation does not
+    * the lock file is corrupt
+
+"));
+}
+
+// If the checksum is listed in the lockfile yet we cannot calculate it (e.g.
+// git dependencies as of today), then make sure we choke.
+#[test]
+fn listed_checksum_bad_if_we_cannot_compute() {
+    let git = git::new("foo", |p| {
+        p.file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project("bar")
+        .file("Cargo.toml", &format!(r#"
+            [project]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = {{ git = '{}' }}
+        "#, git.url()))
+        .file("src/lib.rs", "")
+        .file("Cargo.lock", &format!(r#"
+[[package]]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (git+{0})"
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "git+{0}"
+
+[metadata]
+"checksum foo 0.1.0 (git+{0})" = "checksum"
+"#, git.url()));
+
+    let p = p.build();
+
+    assert_that(p.cargo("fetch"),
+                execs().with_status(101).with_stderr("\
+[UPDATING] git repository `[..]`
+error: checksum for `foo v0.1.0 ([..])` could not be calculated, but a \
+checksum is listed in the existing lock file[..]
+ +this could be indicative of a few possible situations: + + * the source `[..]` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `foo v0.1.0 ([..])` is the same as when the lockfile was generated + +")); +} + +#[test] +fn current_lockfile_format() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let actual = p.read_lockfile(); + + let expected = "\ +[[package]] +name = \"bar\" +version = \"0.0.1\" +dependencies = [ + \"foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\", +] + +[[package]] +name = \"foo\" +version = \"0.1.0\" +source = \"registry+https://github.com/rust-lang/crates.io-index\" + +[metadata] +\"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\" = \"[..]\""; + + for (l, r) in expected.lines().zip(actual.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(actual.lines().count(), expected.lines().count()); +} + +#[test] +fn lockfile_without_root() { + Package::new("foo", "0.1.0").publish(); + + let lockfile = r#"[[package]] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +"#; + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("Cargo.lock", lockfile); + + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let lock = p.read_lockfile(); + assert!(lock.starts_with(lockfile.trim())); +} + +#[test] +fn locked_correct_error() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build").arg("--locked"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry `[..]` +error: the lock file needs to be updated but --locked was passed to prevent this +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/login.rs b/collector/compile-benchmarks/cargo/tests/login.rs new file mode 100644 index 000000000..9bb060a1d --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/login.rs @@ -0,0 +1,128 @@ +#[macro_use] +extern crate cargotest; +extern crate cargo; +extern crate hamcrest; +extern crate toml; + +use std::io::prelude::*; +use std::fs::{self, File}; + +use cargotest::cargo_process; +use cargotest::support::execs; +use cargotest::support::registry::registry; +use cargotest::install::cargo_home; +use cargo::util::config::Config; +use cargo::core::Shell; +use hamcrest::{assert_that, existing_file, is_not}; + +const TOKEN: &str = "test-token"; +const CONFIG_FILE: &str = r#" + [registry] + token = "api-token" +"#; + +fn setup_old_credentials() { + let config = cargo_home().join("config"); + t!(fs::create_dir_all(config.parent().unwrap())); + t!(t!(File::create(&config)).write_all(CONFIG_FILE.as_bytes())); +} + +fn 
setup_new_credentials() { + let config = cargo_home().join("credentials"); + t!(fs::create_dir_all(config.parent().unwrap())); + t!(t!(File::create(&config)).write_all(br#" + token = "api-token" + "#)); +} + +fn check_host_token(toml: toml::Value) -> bool { + match toml { + toml::Value::Table(table) => match table.get("token") { + Some(v) => match v { + &toml::Value::String(ref token) => (token.as_str() == TOKEN), + _ => false, + }, + None => false, + }, + _ => false, + } +} + +#[test] +fn login_with_old_credentials() { + setup_old_credentials(); + + assert_that(cargo_process().arg("login") + .arg("--host").arg(registry().to_string()).arg(TOKEN), + execs().with_status(0)); + + let config = cargo_home().join("config"); + assert_that(&config, existing_file()); + + let mut contents = String::new(); + File::open(&config).unwrap().read_to_string(&mut contents).unwrap(); + assert_eq!(CONFIG_FILE, contents); + + let credentials = cargo_home().join("credentials"); + assert_that(&credentials, existing_file()); + + contents.clear(); + File::open(&credentials).unwrap().read_to_string(&mut contents).unwrap(); + assert!(check_host_token(contents.parse().unwrap())); +} + +#[test] +fn login_with_new_credentials() { + setup_new_credentials(); + + assert_that(cargo_process().arg("login") + .arg("--host").arg(registry().to_string()).arg(TOKEN), + execs().with_status(0)); + + let config = cargo_home().join("config"); + assert_that(&config, is_not(existing_file())); + + let credentials = cargo_home().join("credentials"); + assert_that(&credentials, existing_file()); + + let mut contents = String::new(); + File::open(&credentials).unwrap().read_to_string(&mut contents).unwrap(); + assert!(check_host_token(contents.parse().unwrap())); +} + +#[test] +fn login_with_old_and_new_credentials() { + setup_new_credentials(); + login_with_old_credentials(); +} + +#[test] +fn login_without_credentials() { + assert_that(cargo_process().arg("login") + .arg("--host").arg(registry().to_string()).arg(TOKEN), + execs().with_status(0)); + + let config = cargo_home().join("config"); + assert_that(&config, is_not(existing_file())); + + let credentials = cargo_home().join("credentials"); + assert_that(&credentials, existing_file()); + + let mut contents = String::new(); + File::open(&credentials).unwrap().read_to_string(&mut contents).unwrap(); + assert!(check_host_token(contents.parse().unwrap())); +} + +#[test] +fn new_credentials_is_used_instead_old() { + setup_old_credentials(); + setup_new_credentials(); + + assert_that(cargo_process().arg("login") + .arg("--host").arg(registry().to_string()).arg(TOKEN), + execs().with_status(0)); + + let config = Config::new(Shell::new(), cargo_home(), cargo_home()); + let token = config.get_string("registry.token").unwrap().map(|p| p.val); + assert_eq!(token.unwrap(), TOKEN); +} diff --git a/collector/compile-benchmarks/cargo/tests/metadata.rs b/collector/compile-benchmarks/cargo/tests/metadata.rs new file mode 100644 index 000000000..4805b6741 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/metadata.rs @@ -0,0 +1,692 @@ +extern crate cargotest; +extern crate hamcrest; + +use hamcrest::assert_that; +use cargotest::support::registry::Package; +use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest, main_file}; + +#[test] +fn cargo_metadata_simple() { + let p = project("foo") + .file("src/foo.rs", "") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .build(); + + assert_that(p.cargo("metadata"), execs().with_json(r#" + { + "packages": [ + { + "name": 
"foo", + "version": "0.5.0", + "id": "foo[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "bin" + ], + "crate_types": [ + "bin" + ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]foo.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "id": "foo 0.5.0 (path+file:[..]foo)" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo[/]target", + "version": 1 + }"#)); +} + +#[test] +fn cargo_metadata_warns_on_implicit_version() { + let p = project("foo") + .file("src/foo.rs", "") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .build(); + + assert_that(p.cargo("metadata"), + execs().with_stderr("\ +[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems")); + + assert_that(p.cargo("metadata").arg("--format-version").arg("1"), + execs().with_stderr("")); +} + +#[test] +fn library_with_several_crate_types() { + let p = project("foo") + .file("src/lib.rs", "") + .file("Cargo.toml", r#" +[package] +name = "foo" +version = "0.5.0" + +[lib] +crate-type = ["lib", "staticlib"] + "#) + .build(); + + assert_that(p.cargo("metadata"), execs().with_json(r#" + { + "packages": [ + { + "name": "foo", + "version": "0.5.0", + "id": "foo[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "lib", + "staticlib" + ], + "crate_types": [ + "lib", + "staticlib" + ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "id": "foo 0.5.0 (path+file:[..]foo)" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo[/]target", + "version": 1 + }"#)); +} + +#[test] +fn cargo_metadata_with_deps_and_version() { + let p = project("foo") + .file("src/foo.rs", "") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + license = "MIT" + description = "foo" + + [[bin]] + name = "foo" + + [dependencies] + bar = "*" + "#) + .build(); + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish(); + + assert_that(p.cargo("metadata") + .arg("-q") + .arg("--format-version").arg("1"), + execs().with_json(r#" + { + "packages": [ + { + "dependencies": [], + "features": {}, + "id": "baz 0.0.1 (registry+[..])", + "manifest_path": "[..]Cargo.toml", + "name": "baz", + "source": "registry+[..]", + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "lib" + ], + "crate_types": [ + "lib" + ], + "name": "baz", + "src_path": "[..]lib.rs" + } + ], + "version": "0.0.1" + }, + { + "dependencies": [ + { + "features": [], + "kind": null, + "name": "baz", + "optional": false, + "req": "^0.0.1", + "source": "registry+[..]", + "target": null, + "uses_default_features": true + } + ], + "features": {}, + "id": "bar 0.0.1 (registry+[..])", + "manifest_path": "[..]Cargo.toml", + "name": "bar", + "source": "registry+[..]", + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "lib" + ], + "crate_types": [ + "lib" + ], + "name": "bar", + "src_path": "[..]lib.rs" + } + ], + "version": 
"0.0.1" + }, + { + "dependencies": [ + { + "features": [], + "kind": null, + "name": "bar", + "optional": false, + "req": "*", + "source": "registry+[..]", + "target": null, + "uses_default_features": true + } + ], + "features": {}, + "id": "foo 0.5.0 (path+file:[..]foo)", + "manifest_path": "[..]Cargo.toml", + "name": "foo", + "source": null, + "license": "MIT", + "license_file": null, + "description": "foo", + "targets": [ + { + "kind": [ + "bin" + ], + "crate_types": [ + "bin" + ], + "name": "foo", + "src_path": "[..]foo.rs" + } + ], + "version": "0.5.0" + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [ + "bar 0.0.1 (registry+[..])" + ], + "id": "foo 0.5.0 (path+file:[..]foo)" + }, + { + "dependencies": [ + "baz 0.0.1 (registry+[..])" + ], + "id": "bar 0.0.1 (registry+[..])" + }, + { + "dependencies": [], + "id": "baz 0.0.1 (registry+[..])" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo[/]target", + "version": 1 + }"#)); +} + +#[test] +fn example() { + let p = project("foo") + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .file("Cargo.toml", r#" +[package] +name = "foo" +version = "0.1.0" + +[[example]] +name = "ex" + "#) + .build(); + + assert_that(p.cargo("metadata"), execs().with_json(r#" + { + "packages": [ + { + "name": "foo", + "version": "0.1.0", + "id": "foo[..]", + "license": null, + "license_file": null, + "description": null, + "source": null, + "dependencies": [], + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]lib.rs" + }, + { + "kind": [ "example" ], + "crate_types": [ "bin" ], + "name": "ex", + "src_path": "[..][/]foo[/]examples[/]ex.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": [ + "foo 0.1.0 (path+file:[..]foo)" + ], + "resolve": { + "root": "foo 0.1.0 (path+file://[..]foo)", + "nodes": [ + { + "id": "foo 0.1.0 (path+file:[..]foo)", + "dependencies": [] + } + ] + }, + "target_directory": "[..]foo[/]target", + "version": 1 + }"#)); +} + +#[test] +fn example_lib() { + let p = project("foo") + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .file("Cargo.toml", r#" +[package] +name = "foo" +version = "0.1.0" + +[[example]] +name = "ex" +crate-type = ["rlib", "dylib"] + "#) + .build(); + + assert_that(p.cargo("metadata"), execs().with_json(r#" + { + "packages": [ + { + "name": "foo", + "version": "0.1.0", + "id": "foo[..]", + "license": null, + "license_file": null, + "description": null, + "source": null, + "dependencies": [], + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]lib.rs" + }, + { + "kind": [ "example" ], + "crate_types": [ "rlib", "dylib" ], + "name": "ex", + "src_path": "[..][/]foo[/]examples[/]ex.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": [ + "foo 0.1.0 (path+file:[..]foo)" + ], + "resolve": { + "root": "foo 0.1.0 (path+file://[..]foo)", + "nodes": [ + { + "id": "foo 0.1.0 (path+file:[..]foo)", + "dependencies": [] + } + ] + }, + "target_directory": "[..]foo[/]target", + "version": 1 + }"#)); +} + +#[test] +fn workspace_metadata() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["bar", "baz"] + "#) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file("baz/src/lib.rs", 
"") + .build(); + + assert_that(p.cargo("metadata"), execs().with_status(0).with_json(r#" + { + "packages": [ + { + "name": "bar", + "version": "0.5.0", + "id": "bar[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "bar", + "src_path": "[..]bar[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]bar[/]Cargo.toml" + }, + { + "name": "baz", + "version": "0.5.0", + "id": "baz[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "baz", + "src_path": "[..]baz[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]baz[/]Cargo.toml" + } + ], + "workspace_members": ["baz 0.5.0 (path+file:[..]baz)", "bar 0.5.0 (path+file:[..]bar)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "id": "baz 0.5.0 (path+file:[..]baz)" + }, + { + "dependencies": [], + "id": "bar 0.5.0 (path+file:[..]bar)" + } + ], + "root": null + }, + "target_directory": "[..]foo[/]target", + "version": 1 + }"#)) +} + +#[test] +fn workspace_metadata_no_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["bar", "baz"] + "#) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file("baz/src/lib.rs", "") + .build(); + + assert_that(p.cargo("metadata").arg("--no-deps"), execs().with_status(0).with_json(r#" + { + "packages": [ + { + "name": "bar", + "version": "0.5.0", + "id": "bar[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "bar", + "src_path": "[..]bar[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]bar[/]Cargo.toml" + }, + { + "name": "baz", + "version": "0.5.0", + "id": "baz[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": ["lib"], + "name": "baz", + "src_path": "[..]baz[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]baz[/]Cargo.toml" + } + ], + "workspace_members": ["baz 0.5.0 (path+file:[..]baz)", "bar 0.5.0 (path+file:[..]bar)"], + "resolve": null, + "target_directory": "[..]foo[/]target", + "version": 1 + }"#)) +} + +#[test] +fn cargo_metadata_with_invalid_manifest() { + let p = project("foo") + .file("Cargo.toml", "") + .build(); + + assert_that(p.cargo("metadata").arg("--format-version").arg("1"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + no `package` section found.")) +} + +const MANIFEST_OUTPUT: &'static str= + r#" +{ + "packages": [{ + "name":"foo", + "version":"0.5.0", + "id":"foo[..]0.5.0[..](path+file://[..]/foo)", + "source":null, + "dependencies":[], + "license": null, + "license_file": null, + "description": null, + "targets":[{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..][/]foo[/]src[/]foo.rs" + }], + "features":{}, + "manifest_path":"[..]Cargo.toml" + }], + "workspace_members": [ "foo 0.5.0 (path+file:[..]foo)" ], + "resolve": null, + "target_directory": "[..]foo[/]target", + "version": 1 +}"#; + +#[test] +fn cargo_metadata_no_deps_path_to_cargo_toml_relative() { + let p = project("foo") + 
.file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("metadata").arg("--no-deps") + .arg("--manifest-path").arg("foo/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs().with_status(0) + .with_json(MANIFEST_OUTPUT)); +} + +#[test] +fn cargo_metadata_no_deps_path_to_cargo_toml_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("metadata").arg("--no-deps") + .arg("--manifest-path").arg(p.root().join("Cargo.toml")) + .cwd(p.root().parent().unwrap()), + execs().with_status(0) + .with_json(MANIFEST_OUTPUT)); +} + +#[test] +fn cargo_metadata_no_deps_path_to_cargo_toml_parent_relative() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("metadata").arg("--no-deps") + .arg("--manifest-path").arg("foo") + .cwd(p.root().parent().unwrap()), + execs().with_status(101) + .with_stderr("[ERROR] the manifest-path must be \ + a path to a Cargo.toml file")); +} + +#[test] +fn cargo_metadata_no_deps_path_to_cargo_toml_parent_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("metadata").arg("--no-deps") + .arg("--manifest-path").arg(p.root()) + .cwd(p.root().parent().unwrap()), + execs().with_status(101) + .with_stderr("[ERROR] the manifest-path must be \ + a path to a Cargo.toml file")); +} + +#[test] +fn cargo_metadata_no_deps_cwd() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("metadata").arg("--no-deps") + .cwd(p.root()), + execs().with_status(0) + .with_json(MANIFEST_OUTPUT)); +} + +#[test] +fn cargo_metadata_bad_version() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("metadata").arg("--no-deps") + .arg("--format-version").arg("2") + .cwd(p.root()), + execs().with_status(101) + .with_stderr("[ERROR] metadata version 2 not supported, only 1 is currently supported")); +} + +#[test] +fn multiple_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + a = [] + b = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("metadata") + .arg("--features").arg("a b"), + execs().with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/net-config.rs b/collector/compile-benchmarks/cargo/tests/net-config.rs new file mode 100644 index 000000000..aeaacafc7 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/net-config.rs @@ -0,0 +1,58 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn net_retry_loads_from_config() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1:11/foo/bar" + "#) + .file("src/main.rs", "").file(".cargo/config", r#" + [net] + retry=1 + [http] + timeout=1 + "#) + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101) + 
.with_stderr_contains("[WARNING] spurious network error \ +(1 tries remaining): [..]")); +} + +#[test] +fn net_retry_git_outputs_warning() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1:11/foo/bar" + "#) + .file(".cargo/config", r#" + [http] + timeout=1 + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v").arg("-j").arg("1"), + execs().with_status(101) + .with_stderr_contains("[WARNING] spurious network error \ +(2 tries remaining): [..]") + .with_stderr_contains("\ +[WARNING] spurious network error (1 tries remaining): [..]")); +} diff --git a/collector/compile-benchmarks/cargo/tests/new.rs b/collector/compile-benchmarks/cargo/tests/new.rs new file mode 100644 index 000000000..91463273e --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/new.rs @@ -0,0 +1,444 @@ +extern crate cargo; +extern crate cargotest; +extern crate hamcrest; +extern crate tempdir; + +use std::fs::{self, File}; +use std::io::prelude::*; +use std::env; + +use cargo::util::ProcessBuilder; +use cargotest::process; +use cargotest::support::{execs, paths}; +use hamcrest::{assert_that, existing_file, existing_dir, is_not}; +use tempdir::TempDir; + +fn cargo_process(s: &str) -> ProcessBuilder { + let mut p = cargotest::cargo_process(); + p.arg(s); + p +} + +fn create_empty_gitconfig() { + // This helps on Windows where libgit2 is very aggressive in attempting to + // find a git config file. + let gitconfig = paths::home().join(".gitconfig"); + File::create(gitconfig).unwrap(); +} + + +#[test] +fn simple_lib() { + assert_that(cargo_process("new").arg("--lib").arg("foo").arg("--vcs").arg("none") + .env("USER", "foo"), + execs().with_status(0).with_stderr("\ +[CREATED] library `foo` project +")); + + assert_that(&paths::root().join("foo"), existing_dir()); + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that(&paths::root().join("foo/src/lib.rs"), existing_file()); + assert_that(&paths::root().join("foo/.gitignore"), is_not(existing_file())); + + let lib = paths::root().join("foo/src/lib.rs"); + let mut contents = String::new(); + File::open(&lib).unwrap().read_to_string(&mut contents).unwrap(); + assert_eq!(contents, r#"#[cfg(test)] +mod tests { + #[test] + fn it_works() { + assert_eq!(2 + 2, 4); + } +} +"#); + + assert_that(cargo_process("build").cwd(&paths::root().join("foo")), + execs().with_status(0)); +} + +#[test] +fn simple_bin() { + assert_that(cargo_process("new").arg("--bin").arg("foo") + .env("USER", "foo"), + execs().with_status(0).with_stderr("\ +[CREATED] binary (application) `foo` project +")); + + assert_that(&paths::root().join("foo"), existing_dir()); + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that(&paths::root().join("foo/src/main.rs"), existing_file()); + + assert_that(cargo_process("build").cwd(&paths::root().join("foo")), + execs().with_status(0)); + assert_that(&paths::root().join(&format!("foo/target/debug/foo{}", + env::consts::EXE_SUFFIX)), + existing_file()); +} + +#[test] +fn both_lib_and_bin() { + let td = TempDir::new("cargo").unwrap(); + assert_that(cargo_process("new").arg("--lib").arg("--bin").arg("foo").cwd(td.path()) + .env("USER", "foo"), + execs().with_status(101).with_stderr( + "[ERROR] can't specify both lib and binary outputs")); +} + +#[test] +fn simple_git() { + let td = TempDir::new("cargo").unwrap(); + 
assert_that(cargo_process("new").arg("--lib").arg("foo").cwd(td.path())
+                .env("USER", "foo"),
+                execs().with_status(0));
+
+    assert_that(td.path(), existing_dir());
+    assert_that(&td.path().join("foo/Cargo.toml"), existing_file());
+    assert_that(&td.path().join("foo/src/lib.rs"), existing_file());
+    assert_that(&td.path().join("foo/.git"), existing_dir());
+    assert_that(&td.path().join("foo/.gitignore"), existing_file());
+
+    assert_that(cargo_process("build").cwd(&td.path().join("foo")),
+                execs().with_status(0));
+}
+
+#[test]
+fn no_argument() {
+    assert_that(cargo_process("new"),
+                execs().with_status(1)
+                       .with_stderr("\
+[ERROR] Invalid arguments.
+
+Usage:
+    cargo new [options] <path>
+    cargo new -h | --help
+"));
+}
+
+#[test]
+fn existing() {
+    let dst = paths::root().join("foo");
+    fs::create_dir(&dst).unwrap();
+    assert_that(cargo_process("new").arg("foo"),
+                execs().with_status(101)
+                       .with_stderr(format!("[ERROR] destination `{}` already exists\n\n\
+                           Use `cargo init` to initialize the directory",
+                                            dst.display())));
+}
+
+#[test]
+fn invalid_characters() {
+    assert_that(cargo_process("new").arg("foo.rs"),
+                execs().with_status(101)
+                       .with_stderr("\
+[ERROR] Invalid character `.` in crate name: `foo.rs`
+use --name to override crate name"));
+}
+
+#[test]
+fn reserved_name() {
+    assert_that(cargo_process("new").arg("test"),
+                execs().with_status(101)
+                       .with_stderr("\
+[ERROR] The name `test` cannot be used as a crate name\n\
+use --name to override crate name"));
+}
+
+#[test]
+fn reserved_binary_name() {
+    assert_that(cargo_process("new").arg("--bin").arg("incremental"),
+                execs().with_status(101)
+                       .with_stderr("\
+[ERROR] The name `incremental` cannot be used as a crate name\n\
+use --name to override crate name"));
+}
+
+#[test]
+fn keyword_name() {
+    assert_that(cargo_process("new").arg("pub"),
+                execs().with_status(101)
+                       .with_stderr("\
+[ERROR] The name `pub` cannot be used as a crate name\n\
+use --name to override crate name"));
+}
+
+#[test]
+fn rust_prefix_stripped() {
+    assert_that(cargo_process("new").arg("--lib").arg("rust-foo").env("USER", "foo"),
+                execs().with_status(0)
+                       .with_stderr_contains("note: package will be named `foo`; use --name to override"));
+    let toml = paths::root().join("rust-foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"name = "foo""#));
+}
+
+#[test]
+fn bin_disables_stripping() {
+    assert_that(cargo_process("new").arg("rust-foo").arg("--bin").env("USER", "foo"),
+                execs().with_status(0));
+    let toml = paths::root().join("rust-foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"name = "rust-foo""#));
+}
+
+#[test]
+fn explicit_name_not_stripped() {
+    assert_that(cargo_process("new").arg("foo").arg("--name").arg("rust-bar").env("USER", "foo"),
+                execs().with_status(0));
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"name = "rust-bar""#));
+}
+
+#[test]
+fn finds_author_user() {
+    // Use a temp dir to make sure we don't pick up .cargo/config somewhere in
+    // the hierarchy
+    let td = TempDir::new("cargo").unwrap();
+    create_empty_gitconfig();
+    assert_that(cargo_process("new").arg("foo").env("USER", "foo")
+                .cwd(td.path()),
+                execs().with_status(0));
+
+    let toml = td.path().join("foo/Cargo.toml");
+
let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["foo"]"#));
+}
+
+#[test]
+fn finds_author_user_escaped() {
+    // Use a temp dir to make sure we don't pick up .cargo/config somewhere in
+    // the hierarchy
+    let td = TempDir::new("cargo").unwrap();
+    create_empty_gitconfig();
+    assert_that(cargo_process("new").arg("foo").env("USER", "foo \"bar\"")
+                .cwd(td.path()),
+                execs().with_status(0));
+
+    let toml = td.path().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["foo \"bar\""]"#));
+}
+
+#[test]
+fn finds_author_username() {
+    // Use a temp dir to make sure we don't pick up .cargo/config somewhere in
+    // the hierarchy
+    let td = TempDir::new("cargo").unwrap();
+    create_empty_gitconfig();
+    assert_that(cargo_process("new").arg("foo")
+                .env_remove("USER")
+                .env("USERNAME", "foo")
+                .cwd(td.path()),
+                execs().with_status(0));
+
+    let toml = td.path().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["foo"]"#));
+}
+
+#[test]
+fn finds_author_priority() {
+    // Use a temp dir to make sure we don't pick up .cargo/config somewhere in
+    // the hierarchy
+    let td = TempDir::new("cargo").unwrap();
+    assert_that(cargo_process("new").arg("foo")
+                .env("USER", "bar2")
+                .env("EMAIL", "baz2")
+                .env("CARGO_NAME", "bar")
+                .env("CARGO_EMAIL", "baz")
+                .cwd(td.path()),
+                execs().with_status(0));
+
+    let toml = td.path().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_author_email() {
+    // Use a temp dir to make sure we don't pick up .cargo/config somewhere in
+    // the hierarchy
+    let td = TempDir::new("cargo").unwrap();
+    create_empty_gitconfig();
+    assert_that(cargo_process("new").arg("foo")
+                .env("USER", "bar")
+                .env("EMAIL", "baz")
+                .cwd(td.path()),
+                execs().with_status(0));
+
+    let toml = td.path().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_author_git() {
+    process("git").args(&["config", "--global", "user.name", "bar"])
+                  .exec().unwrap();
+    process("git").args(&["config", "--global", "user.email", "baz"])
+                  .exec().unwrap();
+    assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
+                execs().with_status(0));
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_local_author_git() {
+    process("git").args(&["init"])
+                  .exec().unwrap();
+    process("git").args(&["config", "--global", "user.name", "foo"])
+                  .exec().unwrap();
+    process("git").args(&["config", "--global", "user.email", "foo@bar"])
+                  .exec().unwrap();
+
+    // Set local git user config
+    process("git").args(&["config", "user.name", "bar"])
+                  .exec().unwrap();
+    process("git").args(&["config", "user.email", "baz"])
+                  .exec().unwrap();
+    assert_that(cargo_process("init").env("USER", "foo"),
+                execs().with_status(0));
+
+    let toml = paths::root().join("Cargo.toml");
+    let mut contents =
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_git_email() {
+    let td = TempDir::new("cargo").unwrap();
+    assert_that(cargo_process("new").arg("foo")
+                .env("GIT_AUTHOR_NAME", "foo")
+                .env("GIT_AUTHOR_EMAIL", "gitfoo")
+                .cwd(td.path()),
+                execs().with_status(0));
+
+    let toml = td.path().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["foo <gitfoo>"]"#), contents);
+}
+
+
+#[test]
+fn finds_git_author() {
+    // Use a temp dir to make sure we don't pick up .cargo/config somewhere in
+    // the hierarchy
+    let td = TempDir::new("cargo").unwrap();
+    create_empty_gitconfig();
+    assert_that(cargo_process("new").arg("foo")
+                .env_remove("USER")
+                .env("GIT_COMMITTER_NAME", "gitfoo")
+                .cwd(td.path()),
+                execs().with_status(0));
+
+    let toml = td.path().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["gitfoo"]"#));
+}
+
+#[test]
+fn author_prefers_cargo() {
+    process("git").args(&["config", "--global", "user.name", "foo"])
+                  .exec().unwrap();
+    process("git").args(&["config", "--global", "user.email", "bar"])
+                  .exec().unwrap();
+    let root = paths::root();
+    fs::create_dir(&root.join(".cargo")).unwrap();
+    File::create(&root.join(".cargo/config")).unwrap().write_all(br#"
+        [cargo-new]
+        name = "new-foo"
+        email = "new-bar"
+        vcs = "none"
+    "#).unwrap();
+
+    assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
+                execs().with_status(0));
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+    assert!(contents.contains(r#"authors = ["new-foo <new-bar>"]"#));
+    assert!(!root.join("foo/.gitignore").exists());
+}
+
+#[test]
+fn git_prefers_command_line() {
+    let root = paths::root();
+    let td = TempDir::new("cargo").unwrap();
+    fs::create_dir(&root.join(".cargo")).unwrap();
+    File::create(&root.join(".cargo/config")).unwrap().write_all(br#"
+        [cargo-new]
+        vcs = "none"
+        name = "foo"
+        email = "bar"
+    "#).unwrap();
+
+    assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("git")
+                .cwd(td.path())
+                .env("USER", "foo"),
+                execs().with_status(0));
+    assert!(td.path().join("foo/.gitignore").exists());
+}
+
+#[test]
+fn subpackage_no_git() {
+    assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
+                execs().with_status(0));
+
+    let subpackage = paths::root().join("foo").join("components");
+    fs::create_dir(&subpackage).unwrap();
+    assert_that(cargo_process("new").arg("foo/components/subcomponent")
+                .env("USER", "foo"),
+                execs().with_status(0));
+
+    assert_that(&paths::root().join("foo/components/subcomponent/.git"),
+                is_not(existing_file()));
+    assert_that(&paths::root().join("foo/components/subcomponent/.gitignore"),
+                is_not(existing_file()));
+}
+
+#[test]
+fn subpackage_git_with_vcs_arg() {
+    assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
+                execs().with_status(0));
+
+    let subpackage = paths::root().join("foo").join("components");
+    fs::create_dir(&subpackage).unwrap();
+    assert_that(cargo_process("new").arg("foo/components/subcomponent")
+                .arg("--vcs").arg("git")
+                .env("USER", "foo"),
+                execs().with_status(0));
+
+    assert_that(&paths::root().join("foo/components/subcomponent/.git"),
+                existing_dir());
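+    // (descriptive comment, not in the vendored source:) an explicit
+    // `--vcs git` re-enables git even inside an existing package, in
+    // contrast to subpackage_no_git above.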
assert_that(&paths::root().join("foo/components/subcomponent/.gitignore"), + existing_file()); +} + +#[test] +fn unknown_flags() { + assert_that(cargo_process("new").arg("foo").arg("--flag"), + execs().with_status(1) + .with_stderr("\ +[ERROR] Unknown flag: '--flag' + +Usage: + cargo new [..] + cargo new [..] +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/overrides.rs b/collector/compile-benchmarks/cargo/tests/overrides.rs new file mode 100644 index 000000000..afcd94190 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/overrides.rs @@ -0,0 +1,1277 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::git; +use cargotest::support::paths; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn override_simple() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn missing_version() { + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + foo = { git = 'https://example.com' } + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements must specify a version to replace, but `[..]foo` does not +")); +} + +#[test] +fn invalid_semver_version() { + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + + [replace] + "foo:*" = { git = 'https://example.com' } + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr_contains("\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements must specify a valid semver version to replace, but `foo:*` does not +")); +} + +#[test] +fn different_version() { + Package::new("foo", "0.2.0").publish(); + Package::new("foo", "0.1.0").publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = "0.2.0" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements cannot specify a version requirement, but found one for [..] 
+")); +} + +#[test] +fn transitive() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.2.0") + .dep("foo", "0.1.0") + .file("src/lib.rs", "extern crate foo; fn bar() { foo::foo(); }") + .publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.2.0" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `[..]` +[DOWNLOADING] bar v0.2.0 (registry [..]) +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.2.0 +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn persists_across_rebuilds() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn replace_registry_with_path() { + Package::new("foo", "0.1.0").publish(); + + let _ = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = { path = "../foo" } + "#) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn use_a_spec_to_select() { + Package::new("foo", "0.1.1") + .file("src/lib.rs", "pub fn foo1() {}") + .publish(); + Package::new("foo", "0.2.0").publish(); + Package::new("bar", "0.1.1") + .dep("foo", "0.2") + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { foo::foo3(); } + ") + .publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo3() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + foo = "0.1" + + [replace] + "foo:0.2.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", " + extern crate foo; + extern crate bar; + + pub fn local() { + foo::foo1(); + bar::bar(); + } + ") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `[..]` +[DOWNLOADING] [..] +[DOWNLOADING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn override_adds_some_deps() { + Package::new("foo", "0.1.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1" + "#) + .file("src/lib.rs", "") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `[..]` +[DOWNLOADING] foo v0.1.1 (registry [..]) +[COMPILING] foo v0.1.1 +[COMPILING] bar v0.1.0 ([..]) +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + + Package::new("foo", "0.1.2").publish(); + assert_that(p.cargo("update").arg("-p").arg(&format!("{}#bar", foo.url())), + execs().with_status(0).with_stderr("\ +[UPDATING] git repository `file://[..]` +")); + assert_that(p.cargo("update") + .arg("-p") + .arg("https://github.com/rust-lang/crates.io-index#bar"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +")); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn locked_means_locked_yes_no_seriously_i_mean_locked() { + // this in theory exercises #2041 + Package::new("foo", "0.1.0").publish(); + Package::new("foo", "0.2.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", "") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + bar = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn override_wrong_name() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[UPDATING] git repository [..] +error: no matching package for override `[..]foo:0.1.0` found +location searched: file://[..] +version required: = 0.1.0 +")); +} + +#[test] +fn override_with_nothing() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("src/lib.rs", "") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[UPDATING] git repository [..] +[ERROR] failed to load source for a dependency on `foo` + +Caused by: + Unable to update file://[..] 
+ +Caused by: + Could not find Cargo.toml in `[..]` +")); +} + +#[test] +fn override_wrong_version() { + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [replace] + "foo:0.1.0" = { git = 'https://example.com', version = '0.2.0' } + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements cannot specify a version requirement, but found one for `[..]foo:0.1.0` +")); +} + +#[test] +fn multiple_specs() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{0}' }} + + [replace."https://github.com/rust-lang/crates.io-index#foo:0.1.0"] + git = '{0}' + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[UPDATING] git repository [..] +error: overlapping replacement specifications found: + + * [..] + * [..] + +both specifications match: foo v0.1.0 +")); +} + +#[test] +fn test_override_dep() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{0}' }} + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("test").arg("-p").arg("foo"), + execs().with_status(101) + .with_stderr_contains("\ +error: There are multiple `foo` packages in your project, and the [..] +Please re-run this command with [..] 
+ [..]#foo:0.1.0 + [..]#foo:0.1.0 +")); +} + +#[test] +fn update() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{0}' }} + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + assert_that(p.cargo("update"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[UPDATING] git repository `[..]` +")); +} + +// local -> near -> far +// near is overridden with itself +#[test] +fn no_override_self() { + let deps = git::repo(&paths::root().join("override")) + + .file("far/Cargo.toml", r#" + [package] + name = "far" + version = "0.1.0" + authors = [] + "#) + .file("far/src/lib.rs", "") + + .file("near/Cargo.toml", r#" + [package] + name = "near" + version = "0.1.0" + authors = [] + + [dependencies] + far = { path = "../far" } + "#) + .file("near/src/lib.rs", r#" + #![no_std] + pub extern crate far; + "#) + .build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + near = {{ git = '{0}' }} + + [replace] + "near:0.1.0" = {{ git = '{0}' }} + "#, deps.url())) + .file("src/lib.rs", r#" + #![no_std] + pub extern crate near; + "#) + .build(); + + assert_that(p.cargo("build").arg("--verbose"), + execs().with_status(0)); +} + +#[test] +fn broken_path_override_warns() { + Package::new("foo", "0.1.0").publish(); + Package::new("foo", "0.2.0").publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a1" } + "#) + .file("src/lib.rs", "") + .file("a1/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + "#) + .file("a1/src/lib.rs", "") + .file("a2/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.2" + "#) + .file("a2/src/lib.rs", "") + .file(".cargo/config", r#" + paths = ["a2"] + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] [..] +warning: path override for crate `a` has altered the original list of +dependencies; the dependency on `foo` was either added or +modified to not match the previously resolved version + +This is currently allowed but is known to produce buggy behavior with spurious +recompiles and changes to the crate graph. Path overrides unfortunately were +never intended to support this feature, so for now this message is just a +warning. In the future, however, this message will become a hard error. + +To change the dependency graph via an override it's recommended to use the +`[replace]` feature of Cargo instead of the path override feature. This is +documented online at the url below for more information. + +http://doc.crates.io/specifying-dependencies.html#overriding-dependencies + +[DOWNLOADING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] 
+")); +} + +#[test] +fn override_an_override() { + Package::new("chrono", "0.2.0").dep("serde", "< 0.9").publish(); + Package::new("serde", "0.7.0") + .file("src/lib.rs", "pub fn serde07() {}") + .publish(); + Package::new("serde", "0.8.0") + .file("src/lib.rs", "pub fn serde08() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + chrono = "0.2" + serde = "0.8" + + [replace] + "chrono:0.2.0" = { path = "chrono" } + "serde:0.8.0" = { path = "serde" } + "#) + .file("Cargo.lock", r#" + [[package]] + name = "local" + version = "0.0.1" + dependencies = [ + "chrono 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "chrono" + version = "0.2.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + replace = "chrono 0.2.0" + + [[package]] + name = "chrono" + version = "0.2.0" + dependencies = [ + "serde 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "serde" + version = "0.7.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + + [[package]] + name = "serde" + version = "0.8.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + replace = "serde 0.8.0" + + [[package]] + name = "serde" + version = "0.8.0" + "#) + .file("src/lib.rs", " + extern crate chrono; + extern crate serde; + + pub fn local() { + chrono::chrono(); + serde::serde08_override(); + } + ") + .file("chrono/Cargo.toml", r#" + [package] + name = "chrono" + version = "0.2.0" + authors = [] + + [dependencies] + serde = "< 0.9" + "#) + .file("chrono/src/lib.rs", " + extern crate serde; + pub fn chrono() { + serde::serde07(); + } + ") + .file("serde/Cargo.toml", r#" + [package] + name = "serde" + version = "0.8.0" + authors = [] + "#) + .file("serde/src/lib.rs", " + pub fn serde08_override() {} + ") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn overriding_nonexistent_no_spurious() { + Package::new("foo", "0.1.0").dep("bar", "0.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#) + .file("src/lib.rs", "pub fn foo() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn foo() {}") + .build(); + + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{url}' }} + "bar:0.1.0" = {{ git = '{url}' }} + "#, url = foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[WARNING] package replacement is not used: [..]bar:0.1.0 +[FINISHED] [..] 
+").with_stdout("")); +} + +#[test] +fn no_warnings_when_replace_is_used_in_another_workspace_member() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("ws") + .file("Cargo.toml", r#" + [workspace] + members = [ "first_crate", "second_crate"] + + [replace] + "foo:0.1.0" = { path = "local_foo" }"#) + .file("first_crate/Cargo.toml", r#" + [package] + name = "first_crate" + version = "0.1.0" + + [dependencies] + foo = "0.1.0" + "#) + .file("first_crate/src/lib.rs", "") + .file("second_crate/Cargo.toml", r#" + [package] + name = "second_crate" + version = "0.1.0" + "#) + .file("second_crate/src/lib.rs", "") + .file("local_foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + "#) + .file("local_foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").cwd(p.root().join("first_crate")), + execs().with_status(0) + .with_stdout("") + .with_stderr("\ +[UPDATING] registry `[..]` +[COMPILING] foo v0.1.0 ([..]) +[COMPILING] first_crate v0.1.0 ([..]) +[FINISHED] [..]")); + + assert_that(p.cargo("build").cwd(p.root().join("second_crate")), + execs().with_status(0) + .with_stdout("") + .with_stderr("\ +[COMPILING] second_crate v0.1.0 ([..]) +[FINISHED] [..]")); +} + + +#[test] +fn override_to_path_dep() { + Package::new("foo", "0.1.0").dep("bar", "0.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#) + .file("foo/src/lib.rs", "") + .file("foo/bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("foo/bar/src/lib.rs", "") + .file(".cargo/config", r#" + paths = ["foo"] + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn replace_to_path_dep() { + Package::new("foo", "0.1.0").dep("bar", "0.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = { path = "foo" } + "#) + .file("src/lib.rs", "extern crate foo;") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#) + .file("foo/src/lib.rs", " + extern crate bar; + + pub fn foo() { + bar::bar(); + } + ") + .file("foo/bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("foo/bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn paths_ok_with_optional() { + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = "foo" } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", optional = true } + "#) + .file("foo/src/lib.rs", "") + .file("foo2/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", optional = true } + "#) + .file("foo2/src/lib.rs", "") + 
.file(".cargo/config", r#" + paths = ["foo2"] + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.1.0 ([..]foo2) +[COMPILING] local v0.0.1 ([..]) +[FINISHED] [..] +")); +} + +#[test] +fn paths_add_optional_bad() { + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = "foo" } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", "") + .file("foo2/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", optional = true } + "#) + .file("foo2/src/lib.rs", "") + .file(".cargo/config", r#" + paths = ["foo2"] + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr_contains("\ +warning: path override for crate `foo` has altered the original list of +dependencies; the dependency on `bar` was either added or\ +")); +} + +#[test] +fn override_with_default_feature() { + Package::new("another", "0.1.0").publish(); + Package::new("another", "0.1.1") + .dep("bar", "0.1") + .publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar", default-features = false } + another = "0.1" + another2 = { path = "another2" } + + [replace] + 'bar:0.1.0' = { path = "bar" } + "#) + .file("src/main.rs", r#" + extern crate bar; + + fn main() { + bar::bar(); + } + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [features] + default = [] + "#) + .file("bar/src/lib.rs", r#" + #[cfg(feature = "default")] + pub fn bar() {} + "#) + .file("another2/Cargo.toml", r#" + [package] + name = "another2" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", default-features = false } + "#) + .file("another2/src/lib.rs", "") + .build(); + + assert_that(p.cargo("run"), + execs().with_status(0)); +} + +#[test] +fn override_plus_dep() { + Package::new("bar", "0.1.0").publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [replace] + 'bar:0.1.0' = { path = "bar" } + "#) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = ".." } + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr_contains("\ +error: cyclic package dependency: [..] 
+")); +} diff --git a/collector/compile-benchmarks/cargo/tests/package.rs b/collector/compile-benchmarks/cargo/tests/package.rs new file mode 100644 index 000000000..42a491ebf --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/package.rs @@ -0,0 +1,880 @@ +#[macro_use] +extern crate cargotest; +extern crate flate2; +extern crate git2; +extern crate hamcrest; +extern crate tar; + +use std::fs::File; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; + +use cargotest::{cargo_process, process}; +use cargotest::support::{project, execs, paths, git, path2url, cargo_exe}; +use cargotest::support::registry::Package; +use flate2::read::GzDecoder; +use hamcrest::{assert_that, existing_file, contains, equal_to}; +use tar::Archive; + +#[test] +fn simple() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + license = "MIT" + description = "foo" + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .file("src/bar.txt", "") // should be ignored when packaging + .build(); + + assert_that(p.cargo("package"), + execs().with_status(0).with_stderr(&format!("\ +[WARNING] manifest has no documentation[..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url()))); + assert_that(&p.root().join("target/package/foo-0.0.1.crate"), existing_file()); + assert_that(p.cargo("package").arg("-l"), + execs().with_status(0).with_stdout("\ +Cargo.toml +src[/]main.rs +")); + assert_that(p.cargo("package"), + execs().with_status(0).with_stdout("")); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f).unwrap(); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for f in ar.entries().unwrap() { + let f = f.unwrap(); + let fname = f.header().path_bytes(); + let fname = &*fname; + assert!(fname == b"foo-0.0.1/Cargo.toml" || + fname == b"foo-0.0.1/Cargo.toml.orig" || + fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", f.header().path()) + } +} + +#[test] +fn metadata_warning() { + let p = project("all") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + assert_that(p.cargo("package"), + execs().with_status(0).with_stderr(&format!("\ +warning: manifest has no description, license, license-file, documentation, \ +homepage or repository. +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url()))); + + let p = project("one") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + assert_that(p.cargo("package"), + execs().with_status(0).with_stderr(&format!("\ +warning: manifest has no description, documentation, homepage or repository. +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.url()))); + + let p = project("all") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + assert_that(p.cargo("package"), + execs().with_status(0).with_stderr(&format!("\ +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url()))); +} + +#[test] +fn package_verbose() { + let root = paths::root().join("all"); + let p = git::repo(&root) + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + let mut cargo = cargo_process(); + cargo.cwd(p.root()); + assert_that(cargo.clone().arg("build"), execs().with_status(0)); + + println!("package main repo"); + assert_that(cargo.clone().arg("package").arg("-v").arg("--no-verify"), + execs().with_status(0).with_stderr("\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] [..] +[ARCHIVING] [..] +")); + + println!("package sub-repo"); + assert_that(cargo.arg("package").arg("-v").arg("--no-verify") + .cwd(p.root().join("a")), + execs().with_status(0).with_stderr("\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] a v0.0.1 ([..]) +[ARCHIVING] [..] +[ARCHIVING] [..] +")); +} + +#[test] +fn package_verification() { + let p = project("all") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(p.cargo("package"), + execs().with_status(0).with_stderr(&format!("\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url()))); +} + +#[test] +fn path_dependency_no_version() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("package"), + execs().with_status(101).with_stderr("\ +[WARNING] manifest has no documentation, homepage or repository. +See http://doc.crates.io/manifest.html#package-metadata for more info. +[ERROR] all path dependencies must have a version specified when packaging. +dependency `bar` does not specify a version. 
+")); +} + +#[test] +fn exclude() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = [ + "*.txt", + # file in root + "file_root_1", # NO_CHANGE (ignored) + "/file_root_2", # CHANGING (packaged -> ignored) + "file_root_3/", # NO_CHANGE (packaged) + "file_root_4/*", # NO_CHANGE (packaged) + "file_root_5/**", # NO_CHANGE (packaged) + # file in sub-dir + "file_deep_1", # CHANGING (packaged -> ignored) + "/file_deep_2", # NO_CHANGE (packaged) + "file_deep_3/", # NO_CHANGE (packaged) + "file_deep_4/*", # NO_CHANGE (packaged) + "file_deep_5/**", # NO_CHANGE (packaged) + # dir in root + "dir_root_1", # CHANGING (packaged -> ignored) + "/dir_root_2", # CHANGING (packaged -> ignored) + "dir_root_3/", # CHANGING (packaged -> ignored) + "dir_root_4/*", # NO_CHANGE (ignored) + "dir_root_5/**", # NO_CHANGE (ignored) + # dir in sub-dir + "dir_deep_1", # CHANGING (packaged -> ignored) + "/dir_deep_2", # NO_CHANGE + "dir_deep_3/", # CHANGING (packaged -> ignored) + "dir_deep_4/*", # CHANGING (packaged -> ignored) + "dir_deep_5/**", # CHANGING (packaged -> ignored) + ] + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .file("bar.txt", "") + .file("src/bar.txt", "") + // file in root + .file("file_root_1", "") + .file("file_root_2", "") + .file("file_root_3", "") + .file("file_root_4", "") + .file("file_root_5", "") + // file in sub-dir + .file("some_dir/file_deep_1", "") + .file("some_dir/file_deep_2", "") + .file("some_dir/file_deep_3", "") + .file("some_dir/file_deep_4", "") + .file("some_dir/file_deep_5", "") + // dir in root + .file("dir_root_1/some_dir/file", "") + .file("dir_root_2/some_dir/file", "") + .file("dir_root_3/some_dir/file", "") + .file("dir_root_4/some_dir/file", "") + .file("dir_root_5/some_dir/file", "") + // dir in sub-dir + .file("some_dir/dir_deep_1/some_dir/file", "") + .file("some_dir/dir_deep_2/some_dir/file", "") + .file("some_dir/dir_deep_3/some_dir/file", "") + .file("some_dir/dir_deep_4/some_dir/file", "") + .file("some_dir/dir_deep_5/some_dir/file", "") + .build(); + + assert_that(p.cargo("package").arg("--no-verify").arg("-v"), + execs().with_status(0).with_stdout("").with_stderr("\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([..]) +[WARNING] [..] file `dir_root_1[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `dir_root_2[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `dir_root_3[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `some_dir[/]dir_deep_1[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `some_dir[/]dir_deep_3[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `some_dir[/]dir_deep_4[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `some_dir[/]dir_deep_5[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `some_dir[/]file_deep_1` WILL be excluded [..] +See [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] 
+")); + + assert_that(&p.root().join("target/package/foo-0.0.1.crate"), existing_file()); + + assert_that(p.cargo("package").arg("-l"), + execs().with_status(0).with_stdout("\ +Cargo.toml +dir_root_1[/]some_dir[/]file +dir_root_2[/]some_dir[/]file +dir_root_3[/]some_dir[/]file +file_root_3 +file_root_4 +file_root_5 +some_dir[/]dir_deep_1[/]some_dir[/]file +some_dir[/]dir_deep_2[/]some_dir[/]file +some_dir[/]dir_deep_3[/]some_dir[/]file +some_dir[/]dir_deep_4[/]some_dir[/]file +some_dir[/]dir_deep_5[/]some_dir[/]file +some_dir[/]file_deep_1 +some_dir[/]file_deep_2 +some_dir[/]file_deep_3 +some_dir[/]file_deep_4 +some_dir[/]file_deep_5 +src[/]main.rs +")); +} + +#[test] +fn include() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + include = ["foo.txt", "**/*.rs", "Cargo.toml"] + "#) + .file("foo.txt", "") + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .file("src/bar.txt", "") // should be ignored when packaging + .build(); + + assert_that(p.cargo("package").arg("--no-verify").arg("-v"), + execs().with_status(0).with_stderr("\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +")); +} + +#[test] +fn package_lib_with_bin() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + extern crate foo; + fn main() {} + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("package").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn package_git_submodule() { + let project = git::new("foo", |project| { + project.file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = ["foo@example.com"] + license = "MIT" + description = "foo" + repository = "foo" + "#) + .file("src/lib.rs", "pub fn foo() {}") + }).unwrap(); + let library = git::new("bar", |library| { + library.file("Makefile", "all:") + }).unwrap(); + + let repository = git2::Repository::open(&project.root()).unwrap(); + let url = path2url(library.root()).to_string(); + git::add_submodule(&repository, &url, Path::new("bar")); + git::commit(&repository); + + let repository = git2::Repository::open(&project.root().join("bar")).unwrap(); + repository.reset(&repository.revparse_single("HEAD").unwrap(), + git2::ResetType::Hard, None).unwrap(); + + assert_that(cargo_process().arg("package").cwd(project.root()) + .arg("--no-verify").arg("-v"), + execs().with_status(0).with_stderr_contains("[ARCHIVING] bar/Makefile")); +} + +#[test] +fn no_duplicates_from_modified_tracked_files() { + let root = paths::root().join("all"); + let p = git::repo(&root) + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + File::create(p.root().join("src/main.rs")).unwrap().write_all(br#" + fn main() { println!("A change!"); } + "#).unwrap(); + let mut cargo = cargo_process(); + cargo.cwd(p.root()); + assert_that(cargo.clone().arg("build"), execs().with_status(0)); + assert_that(cargo.arg("package").arg("--list"), + execs().with_status(0).with_stdout("\ +Cargo.toml +src/main.rs +")); +} + +#[test] +fn ignore_nested() { + let cargo_toml = r#" + [project] + name = "nested" + version = "0.0.1" + authors = [] + license = "MIT" + description = "nested" + "#; + let main_rs = r#" + fn main() { 
println!("hello"); } + "#; + let p = project("nested") + .file("Cargo.toml", cargo_toml) + .file("src/main.rs", main_rs) + // If a project happens to contain a copy of itself, we should + // ignore it. + .file("a_dir/nested/Cargo.toml", cargo_toml) + .file("a_dir/nested/src/main.rs", main_rs) + .build(); + + assert_that(p.cargo("package"), + execs().with_status(0).with_stderr(&format!("\ +[WARNING] manifest has no documentation[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] nested v0.0.1 ({dir}) +[VERIFYING] nested v0.0.1 ({dir}) +[COMPILING] nested v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url()))); + assert_that(&p.root().join("target/package/nested-0.0.1.crate"), existing_file()); + assert_that(p.cargo("package").arg("-l"), + execs().with_status(0).with_stdout("\ +Cargo.toml +src[..]main.rs +")); + assert_that(p.cargo("package"), + execs().with_status(0).with_stdout("")); + + let f = File::open(&p.root().join("target/package/nested-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f).unwrap(); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for f in ar.entries().unwrap() { + let f = f.unwrap(); + let fname = f.header().path_bytes(); + let fname = &*fname; + assert!(fname == b"nested-0.0.1/Cargo.toml" || + fname == b"nested-0.0.1/Cargo.toml.orig" || + fname == b"nested-0.0.1/src/main.rs", + "unexpected filename: {:?}", f.header().path()) + } +} + +#[cfg(unix)] // windows doesn't allow these characters in filenames +#[test] +fn package_weird_characters() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .file("src/:foo", "") + .build(); + + assert_that(p.cargo("package"), + execs().with_status(101).with_stderr("\ +warning: [..] +See [..] +[PACKAGING] foo [..] +[ERROR] failed to prepare local package for uploading + +Caused by: + cannot package a filename with a special character `:`: src/:foo +")); +} + +#[test] +fn repackage_on_source_change() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .build(); + + assert_that(p.cargo("package"), + execs().with_status(0)); + + // Add another source file + let mut file = File::create(p.root().join("src").join("foo.rs")).unwrap_or_else(|e| { + panic!("could not create file {}: {}", p.root().join("src/foo.rs").display(), e) + }); + + file.write_all(br#" + fn main() { println!("foo"); } + "#).unwrap(); + std::mem::drop(file); + + let mut pro = process(&cargo_exe()); + pro.arg("package").cwd(p.root()); + + // Check that cargo rebuilds the tarball + assert_that(pro, execs().with_status(0).with_stderr(&format!("\ +[WARNING] [..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.url()))); + + // Check that the tarball contains the added file + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f).unwrap(); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let entries = ar.entries().unwrap(); + let entry_paths = entries.map(|entry| { + entry.unwrap().path().unwrap().into_owned() + }).collect::>(); + assert_that(&entry_paths, contains(vec![PathBuf::from("foo-0.0.1/src/foo.rs")])); +} + +#[test] +#[cfg(unix)] +fn broken_symlink() { + use std::os::unix::fs; + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = 'foo' + documentation = 'foo' + homepage = 'foo' + repository = 'foo' + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .build(); + t!(fs::symlink("nowhere", &p.root().join("src/foo.rs"))); + + assert_that(p.cargo("package").arg("-v"), + execs().with_status(101) + .with_stderr_contains("\ +error: failed to prepare local package for uploading + +Caused by: + failed to open for archiving: `[..]foo.rs` + +Caused by: + [..] +")); +} + +#[test] +fn do_not_package_if_repository_is_dirty() { + let p = project("foo").build(); + + // Create a Git repository containing a minimal Rust project. + let _ = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + // Modify Cargo.toml without committing the change. + p.change_file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + # change + "#); + + assert_that(p.cargo("package"), + execs().with_status(101) + .with_stderr("\ +error: 1 files in the working directory contain changes that were not yet \ +committed into git: + +Cargo.toml + +to proceed despite this, pass the `--allow-dirty` flag +")); +} + +#[test] +fn generated_manifest() { + Package::new("abc", "1.0.0").publish(); + Package::new("def", "1.0.0").publish(); + Package::new("ghi", "1.0.0").publish(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + license = "MIT" + description = "foo" + + [project.metadata] + foo = 'bar' + + [workspace] + + [dependencies] + bar = { path = "bar", version = "0.1" } + def = "1.0" + ghi = "1.0" + abc = "1.0" + "#) + .file("src/main.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("package").arg("--no-verify"), + execs().with_status(0)); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f).unwrap(); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let mut entry = ar.entries().unwrap() + .map(|f| f.unwrap()) + .find(|e| e.path().unwrap().ends_with("Cargo.toml")) + .unwrap(); + let mut contents = String::new(); + entry.read_to_string(&mut contents).unwrap(); + // BTreeMap makes the order of dependencies in the generated file deterministic + // by sorting alphabetically + assert_that(&contents[..], 
equal_to( +r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "foo" +version = "0.0.1" +authors = [] +exclude = ["*.txt"] +description = "foo" +license = "MIT" + +[package.metadata] +foo = "bar" +[dependencies.abc] +version = "1.0" + +[dependencies.bar] +version = "0.1" + +[dependencies.def] +version = "1.0" + +[dependencies.ghi] +version = "1.0" +"#)); +} + +#[test] +fn ignore_workspace_specifier() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + + authors = [] + + [workspace] + + [dependencies] + bar = { path = "bar", version = "0.1" } + "#) + .file("src/main.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("package").arg("--no-verify").cwd(p.root().join("bar")), + execs().with_status(0)); + + let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap(); + let mut rdr = GzDecoder::new(f).unwrap(); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let mut entry = ar.entries().unwrap() + .map(|f| f.unwrap()) + .find(|e| e.path().unwrap().ends_with("Cargo.toml")) + .unwrap(); + let mut contents = String::new(); + entry.read_to_string(&mut contents).unwrap(); + assert_that(&contents[..], equal_to( +r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "bar" +version = "0.1.0" +authors = [] +"#)); +} + +#[test] +fn package_two_kinds_of_deps() { + Package::new("other", "1.0.0").publish(); + Package::new("other1", "1.0.0").publish(); + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + other = "1.0" + other1 = { version = "1.0" } + "#) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("package").arg("--no-verify"), + execs().with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/patch.rs b/collector/compile-benchmarks/cargo/tests/patch.rs new file mode 100644 index 000000000..c8d5e9ca6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/patch.rs @@ -0,0 +1,798 @@ +#[macro_use] +extern crate cargotest; +extern crate hamcrest; +extern crate toml; + +use std::fs::{self, File}; +use std::io::{Read, Write}; + +use cargotest::support::git; +use cargotest::support::paths; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn replace() { + Package::new("foo", "0.1.0").publish(); + Package::new("deep-foo", "0.1.0") + .file("src/lib.rs", r#" + extern crate foo; + pub fn deep() { + foo::foo(); + } + "#) + .dep("foo", "0.1.0") + .publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + deep-foo = "0.1.0" + + [patch.crates-io] + foo = { path = "foo" } + "#) + .file("src/lib.rs", " + extern crate foo; + extern crate deep_foo; + pub fn bar() { + foo::foo(); + deep_foo::deep(); + } + ") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] deep-foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] deep-foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build"),//.env("RUST_LOG", "trace"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); +} + +#[test] +fn nonexistent() { + Package::new("baz", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = "foo" } + "#) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); +} + +#[test] +fn patch_git() { + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + let p = project("bar") + .file("Cargo.toml", &format!(r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = {{ git = '{}' }} + + [patch.'{0}'] + foo = {{ path = "foo" }} + "#, foo.url())) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#" + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] git repository `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); +} + +#[test] +fn patch_to_git() { + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", &format!(r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [patch.crates-io] + foo = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ") + .build(); + + assert_that(p.cargo("build"),//.env("RUST_LOG", "cargo=trace"), + execs().with_status(0).with_stderr("\ +[UPDATING] git repository `file://[..]` +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); +} + +#[test] +fn unused() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = "foo" } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#" + not rust code + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.1.0 [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); + + // unused patch should be in the lock file + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock).unwrap(); + let toml: toml::Value = toml::from_str(&lock).unwrap(); + assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); + assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("foo")); + assert_eq!(toml["patch"]["unused"][0]["version"].as_str(), Some("0.2.0")); +} + +#[test] +fn unused_git() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + let p = project("bar") + .file("Cargo.toml", &format!(r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [patch.crates-io] + foo = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] git repository `file://[..]` +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.1.0 [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); +} + +#[test] +fn add_patch() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.1.0 [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = 'foo' } + "#)); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); +} + +#[test] +fn add_ignored_patch() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.1.0 [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = 'foo' } + "#)); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]")); +} + +#[test] +fn new_minor() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = 'foo' } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.1 [..] +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn transitive_new_minor() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + subdir = { path = 'subdir' } + + [patch.crates-io] + foo = { path = 'foo' } + "#) + .file("src/lib.rs", "") + .file("subdir/Cargo.toml", r#" + [package] + name = "subdir" + version = "0.1.0" + authors = [] + + [dependencies] + foo = '0.1.0' + "#) + .file("subdir/src/lib.rs", r#""#) + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.1 [..] +[COMPILING] subdir v0.1.0 [..] +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn new_major() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.2.0" + + [patch.crates-io] + foo = { path = 'foo' } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.2.0 [..] +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + Package::new("foo", "0.2.0").publish(); + assert_that(p.cargo("update"), + execs().with_status(0)); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.2.0" + "#)); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.2.0 [..] +[COMPILING] foo v0.2.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn transitive_new_major() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + subdir = { path = 'subdir' } + + [patch.crates-io] + foo = { path = 'foo' } + "#) + .file("src/lib.rs", "") + .file("subdir/Cargo.toml", r#" + [package] + name = "subdir" + version = "0.1.0" + authors = [] + + [dependencies] + foo = '0.2.0' + "#) + .file("subdir/src/lib.rs", r#""#) + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.2.0 [..] +[COMPILING] subdir v0.1.0 [..] +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn remove_patch() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [patch.crates-io] + foo = { path = 'foo' } + bar = { path = 'bar' } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/lib.rs", r#""#) + .build(); + + // Generate a lock file where `bar` is unused + assert_that(p.cargo("build"), execs().with_status(0)); + let mut lock_file1 = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock_file1).unwrap(); + + // Remove `bar` and generate a new lock file form the old one + File::create(p.root().join("Cargo.toml")).unwrap().write_all(r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [patch.crates-io] + foo = { path = 'foo' } + "#.as_bytes()).unwrap(); + assert_that(p.cargo("build"), execs().with_status(0)); + let mut lock_file2 = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock_file2).unwrap(); + + // Remove the lock file and build from scratch + fs::remove_file(p.root().join("Cargo.lock")).unwrap(); + assert_that(p.cargo("build"), execs().with_status(0)); + let mut lock_file3 = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock_file3).unwrap(); + + assert!(lock_file1.contains("bar")); + assert_eq!(lock_file2, lock_file3); + assert!(lock_file1 != lock_file2); +} + +#[test] +fn non_crates_io() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [patch.some-other-source] + foo = { path = 'foo' } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid url `some-other-source`: relative URL without a base +")); +} + +#[test] +fn replace_with_crates_io() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + 
[patch.crates-io] + foo = "0.1" + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[UPDATING] [..] +error: failed to resolve patches for `[..]` + +Caused by: + patch for `foo` in `[..]` points to the same source, but patches must point \ + to different sources +")); +} + +#[test] +fn patch_in_virtual() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [workspace] + members = ["bar"] + + [patch.crates-io] + foo = { path = "foo" } + "#) + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", r#""#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1" + "#) + .file("bar/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[FINISHED] [..] +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/path.rs b/collector/compile-benchmarks/cargo/tests/path.rs new file mode 100644 index 000000000..0e6fd3373 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/path.rs @@ -0,0 +1,988 @@ +extern crate cargo; +#[macro_use] +extern crate cargotest; +extern crate hamcrest; + +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargo::util::process; +use cargotest::sleep_ms; +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::{project, execs, main_file}; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_file}; + +#[test] +#[cfg(not(windows))] // I have no idea why this is failing spuriously on + // Windows, for more info see #3466. 
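+// Exercises a nested path-dependency chain (foo -> bar -> bar/baz): after a
+// full build and `cargo clean`, `cargo build -p` is used to rebuild `baz` and
+// then `foo` individually.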
+fn cargo_compile_with_nested_deps_shorthand() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + + version = "0.5.0" + path = "baz" + + [lib] + + name = "bar" + "#) + .file("bar/src/bar.rs", r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#) + .file("bar/baz/Cargo.toml", r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "baz" + "#) + .file("bar/baz/src/baz.rs", r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/bar/baz)\n\ + [COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url(), + p.url()))); + + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), + execs().with_stdout("test passed\n").with_status(0)); + + println!("cleaning"); + assert_that(p.cargo("clean").arg("-v"), + execs().with_stdout("").with_status(0)); + println!("building baz"); + assert_that(p.cargo("build").arg("-p").arg("baz"), + execs().with_status(0) + .with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/bar/baz)\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url()))); + println!("building foo"); + assert_that(p.cargo("build") + .arg("-p").arg("foo"), + execs().with_status(0) + .with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url()))); +} + +#[test] +fn cargo_compile_with_root_dev_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + + version = "0.5.0" + path = "../bar" + + [[bin]] + name = "foo" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .build(); + let _p2 = project("bar") + .file("Cargo.toml", r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", r#" + pub fn gimme() -> &'static str { + "zoidberg" + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101)) +} + +#[test] +fn cargo_compile_with_root_dev_deps_with_testing() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + + version = "0.5.0" + path = "../bar" + + [[bin]] + name = "foo" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .build(); + let _p2 = project("bar") + .file("Cargo.toml", r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", r#" + pub fn gimme() -> &'static str { + "zoidberg" + } + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_stderr("\ +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn cargo_compile_with_transitive_dev_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.baz] + + git = "git://example.com/path/to/nowhere" + + [lib] + + name = "bar" + "#) + .file("bar/src/bar.rs", r#" + pub fn gimme() -> &'static str { + "zoidberg" + } + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + p.url(), + p.url()))); + + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), + execs().with_stdout("zoidberg\n")); +} + +#[test] +fn no_rebuild_dependency() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { bar::bar() } + "#) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "bar" + "#) + .file("bar/src/bar.rs", r#" + pub fn bar() {} + "#) + .build(); + // First time around we should compile both foo and bar + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url()))); + + sleep_ms(1000); + p.change_file("src/main.rs", r#" + extern crate bar; + fn main() { bar::bar(); } + "#); + // Don't compile bar, but do recompile foo. + assert_that(p.cargo("build"), + execs().with_stderr("\ + [COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n")); +} + +#[test] +fn deep_dependencies_trigger_rebuild() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { bar::bar() } + "#) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "bar" + [dependencies.baz] + path = "../baz" + "#) + .file("bar/src/bar.rs", r#" + extern crate baz; + pub fn bar() { baz::baz() } + "#) + .file("baz/Cargo.toml", r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "baz" + "#) + .file("baz/src/baz.rs", r#" + pub fn baz() {} + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\ + [COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url(), + p.url()))); + assert_that(p.cargo("build"), + execs().with_stdout("")); + + // Make sure an update to baz triggers a rebuild of bar + // + // We base recompilation off mtime, so sleep for at least a second to ensure + // that this write will change the mtime. 
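+    // Many filesystems only store mtimes with one-second resolution, so a
+    // shorter sleep could leave the mtime unchanged.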
+ sleep_ms(1000); + File::create(&p.root().join("baz/src/baz.rs")).unwrap().write_all(br#" + pub fn baz() { println!("hello!"); } + "#).unwrap(); + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\ + [COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url(), + p.url()))); + + // Make sure an update to bar doesn't trigger baz + sleep_ms(1000); + File::create(&p.root().join("bar/src/bar.rs")).unwrap().write_all(br#" + extern crate baz; + pub fn bar() { println!("hello!"); baz::baz(); } + "#).unwrap(); + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url()))); + +} + +#[test] +fn no_rebuild_two_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + [dependencies.baz] + path = "baz" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { bar::bar() } + "#) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "bar" + [dependencies.baz] + path = "../baz" + "#) + .file("bar/src/bar.rs", r#" + pub fn bar() {} + "#) + .file("baz/Cargo.toml", r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "baz" + "#) + .file("baz/src/baz.rs", r#" + pub fn baz() {} + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\ + [COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url(), + p.url()))); + assert_that(&p.bin("foo"), existing_file()); + assert_that(p.cargo("build"), + execs().with_stdout("")); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn nested_deps_recompile() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "src/bar" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("src/bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "bar" + "#) + .file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 92 }") + .build(); + let bar = p.url(); + + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/src/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + bar, + p.url()))); + sleep_ms(1000); + + File::create(&p.root().join("src/main.rs")).unwrap().write_all(br#" + fn main() {} + "#).unwrap(); + + // This shouldn't recompile `bar` + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url()))); +} + +#[test] +fn error_message_for_missing_manifest() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + path = "src/bar" + "#) + .file("src/lib.rs", "") + .file("src/bar/not-a-manifest", "") + .build(); + + 
assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[ERROR] failed to load source for a dependency on `bar` + +Caused by: + Unable to update file://[..] + +Caused by: + failed to read `[..]bar[/]Cargo.toml` + +Caused by: + [..] (os error [..]) +")); + +} + +#[test] +fn override_relative() { + let bar = project("bar") + .file("Cargo.toml", r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "") + .build(); + + fs::create_dir(&paths::root().join(".cargo")).unwrap(); + File::create(&paths::root().join(".cargo/config")).unwrap() + .write_all(br#"paths = ["bar"]"#).unwrap(); + + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = '{}' + "#, bar.root().display())) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + +} + +#[test] +fn override_self() { + let bar = project("bar") + .file("Cargo.toml", r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("src/lib.rs", "") + .build(); + + let p = project("foo"); + let root = p.root().clone(); + let p = p + .file(".cargo/config", &format!(r#" + paths = ['{}'] + "#, root.display())) + .file("Cargo.toml", &format!(r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = '{}' + + "#, bar.root().display())) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn override_path_dep() { + let bar = project("bar") + .file("p1/Cargo.toml", r#" + [package] + name = "p1" + version = "0.5.0" + authors = [] + + [dependencies.p2] + path = "../p2" + "#) + .file("p1/src/lib.rs", "") + .file("p2/Cargo.toml", r#" + [package] + name = "p2" + version = "0.5.0" + authors = [] + "#) + .file("p2/src/lib.rs", "") + .build(); + + let p = project("foo") + .file(".cargo/config", &format!(r#" + paths = ['{}', '{}'] + "#, bar.root().join("p1").display(), + bar.root().join("p2").display())) + .file("Cargo.toml", &format!(r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.p2] + path = '{}' + + "#, bar.root().join("p2").display())) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + +} + +#[test] +fn path_dep_build_cmd() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#) + .file("src/main.rs", + &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [lib] + name = "bar" + path = "src/bar.rs" + "#) + .file("bar/build.rs", r#" + use std::fs; + fn main() { + fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); + } + "#) + .file("bar/src/bar.rs.in", r#" + pub fn gimme() -> i32 { 0 } + "#).build(); + p.root().join("bar").move_into_the_past(); + + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + p.url(), + p.url()))); + + assert_that(&p.bin("foo"), existing_file()); + + 
assert_that(process(&p.bin("foo")), + execs().with_stdout("0\n")); + + // Touching bar.rs.in should cause the `build` command to run again. + { + let file = fs::File::create(&p.root().join("bar/src/bar.rs.in")); + file.unwrap().write_all(br#"pub fn gimme() -> i32 { 1 }"#).unwrap(); + } + + assert_that(p.cargo("build"), + execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + p.url(), + p.url()))); + + assert_that(process(&p.bin("foo")), + execs().with_stdout("1\n")); +} + +#[test] +fn dev_deps_no_rebuild_lib() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.bar] + path = "bar" + + [lib] + name = "foo" + doctest = false + "#) + .file("src/lib.rs", r#" + #[cfg(test)] #[allow(unused_extern_crates)] extern crate bar; + #[cfg(not(test))] pub fn foo() { env!("FOO"); } + "#) + .file("bar/Cargo.toml", r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + assert_that(p.cargo("build") + .env("FOO", "bar"), + execs().with_status(0) + .with_stderr(&format!("[COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url()))); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] [..] v0.5.0 ({url}[..]) +[COMPILING] [..] v0.5.0 ({url}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", url = p.url())) + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn custom_target_no_rebuild() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "a" } + [workspace] + members = ["a", "b"] + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "../a" } + "#) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + t!(fs::rename(p.root().join("target"), p.root().join("target_moved"))); + assert_that(p.cargo("build") + .arg("--manifest-path=b/Cargo.toml") + .env("CARGO_TARGET_DIR", "target_moved"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] b v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn override_and_depend() { + let p = project("foo") + .file("a/a1/Cargo.toml", r#" + [project] + name = "a1" + version = "0.5.0" + authors = [] + [dependencies] + a2 = { path = "../a2" } + "#) + .file("a/a1/src/lib.rs", "") + .file("a/a2/Cargo.toml", r#" + [project] + name = "a2" + version = "0.5.0" + authors = [] + "#) + .file("a/a2/src/lib.rs", "") + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a1 = { path = "../a/a1" } + a2 = { path = "../a/a2" } + "#) + .file("b/src/lib.rs", "") + .file("b/.cargo/config", r#" + paths = ["../a"] + "#) + .build(); + assert_that(p.cargo("build").cwd(p.root().join("b")), + execs().with_status(0) + .with_stderr("\ +[COMPILING] a2 v0.5.0 ([..]) +[COMPILING] a1 v0.5.0 ([..]) +[COMPILING] b v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn missing_path_dependency() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + paths = ["../whoa-this-does-not-exist"] + "#) + .build(); + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +[ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \ +(defined in `[..]`) + +Caused by: + failed to read directory `[..]` + +Caused by: + [..] (os error [..]) +")); +} + +#[test] +fn invalid_path_dep_in_workspace_with_lockfile() { + Package::new("bar", "1.0.0").publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "top" + version = "0.5.0" + authors = [] + + [workspace] + + [dependencies] + foo = { path = "foo" } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("foo/src/lib.rs", "") + .build(); + + // Generate a lock file + assert_that(p.cargo("build"), execs().with_status(0)); + + // Change the dependency on `bar` to an invalid path + File::create(&p.root().join("foo/Cargo.toml")).unwrap().write_all(br#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = { path = "" } + "#).unwrap(); + + // Make sure we get a nice error. In the past this actually stack + // overflowed! + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: no matching package named `bar` found (required by `foo`) +location searched: [..] 
+version required: * +")); +} + +#[test] +fn workspace_produces_rlib() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "top" + version = "0.5.0" + authors = [] + + [workspace] + + [dependencies] + foo = { path = "foo" } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + "#) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + assert_that(&p.root().join("target/debug/libtop.rlib"), existing_file()); + assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); + +} diff --git a/collector/compile-benchmarks/cargo/tests/plugins.rs b/collector/compile-benchmarks/cargo/tests/plugins.rs new file mode 100644 index 000000000..c10e47865 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/plugins.rs @@ -0,0 +1,378 @@ +extern crate cargotest; +extern crate hamcrest; + +use std::fs; +use std::env; + +use cargotest::{is_nightly, rustc_host}; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn plugin_to_the_max() { + if !is_nightly() { return } + + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo_lib" + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + #![feature(plugin)] + #![plugin(bar)] + extern crate foo_lib; + + fn main() { foo_lib::foo(); } + "#) + .file("src/foo_lib.rs", r#" + #![feature(plugin)] + #![plugin(bar)] + + pub fn foo() {} + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + + [dependencies.baz] + path = "../baz" + "#) + .file("src/lib.rs", r#" + #![feature(plugin_registrar, rustc_private)] + + extern crate rustc_plugin; + extern crate baz; + + use rustc_plugin::Registry; + + #[plugin_registrar] + pub fn foo(_reg: &mut Registry) { + println!("{}", baz::baz()); + } + "#) + .build(); + let _baz = project("baz") + .file("Cargo.toml", r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + + [lib] + name = "baz" + crate_type = ["dylib"] + "#) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + assert_that(foo.cargo("build"), + execs().with_status(0)); + assert_that(foo.cargo("doc"), + execs().with_status(0)); +} + +#[test] +fn plugin_with_dynamic_native_dependency() { + if !is_nightly() { return } + + let workspace = project("ws") + .file("Cargo.toml", r#" + [workspace] + members = ["builder", "foo"] + "#) + .build(); + + let build = project("ws/builder") + .file("Cargo.toml", r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + + [lib] + name = "builder" + crate-type = ["dylib"] + "#) + .file("src/lib.rs", r#" + #[no_mangle] + pub extern fn foo() {} + "#) + .build(); + + let foo = project("ws/foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + #![feature(plugin)] + #![plugin(bar)] + + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = 'build.rs' + + [lib] + name = "bar" + plugin = true + "#) + .file("bar/build.rs", r#" + use std::path::PathBuf; + use std::env; + + fn main() { + let src = PathBuf::from(env::var("SRC").unwrap()); + println!("cargo:rustc-flags=-L {}/deps", src.parent().unwrap().display()); + } + "#) + 
.file("bar/src/lib.rs", r#" + #![feature(plugin_registrar, rustc_private)] + extern crate rustc_plugin; + + use rustc_plugin::Registry; + + #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] + #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] + extern { fn foo(); } + + #[plugin_registrar] + pub fn bar(_reg: &mut Registry) { + unsafe { foo() } + } + "#) + .build(); + + assert_that(build.cargo("build"), + execs().with_status(0)); + + let src = workspace.root().join("target/debug"); + let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| { + let lib = lib.file_name().unwrap().to_str().unwrap(); + lib.starts_with(env::consts::DLL_PREFIX) && + lib.ends_with(env::consts::DLL_SUFFIX) + }).unwrap(); + + assert_that(foo.cargo("build").env("SRC", &lib).arg("-v"), + execs().with_status(0)); +} + +#[test] +fn plugin_integration() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [lib] + name = "foo" + plugin = true + doctest = false + "#) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .file("tests/it_works.rs", "") + .build(); + + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn doctest_a_plugin() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#) + .file("src/lib.rs", r#" + #[macro_use] + extern crate bar; + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + "#) + .file("bar/src/lib.rs", r#" + pub fn bar() {} + "#) + .build(); + + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); +} + +// See #1515 +#[test] +fn native_plugin_dependency_with_custom_ar_linker() { + let target = rustc_host(); + + let _foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + "#) + .file("src/lib.rs", "") + .build(); + + let bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = "../foo" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", &format!(r#" + [target.{}] + ar = "nonexistent-ar" + linker = "nonexistent-linker" + "#, target)) + .build(); + + assert_that(bar.cargo("build").arg("--verbose"), + execs().with_stderr_contains("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]` +[ERROR] could not exec the linker [..] 
+")); +} + +#[test] +fn panic_abort_plugins() { + if !is_nightly() { + return + } + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + + [dependencies] + foo = { path = "foo" } + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + "#) + .file("foo/src/lib.rs", r#" + #![feature(rustc_private)] + extern crate syntax; + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn shared_panic_abort_plugins() { + if !is_nightly() { + return + } + + let p = project("top") + .file("Cargo.toml", r#" + [package] + name = "top" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + + [dependencies] + foo = { path = "foo" } + bar = { path = "bar" } + "#) + .file("src/lib.rs", " + extern crate bar; + ") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + + [dependencies] + bar = { path = "../bar" } + "#) + .file("foo/src/lib.rs", r#" + #![feature(rustc_private)] + extern crate syntax; + extern crate bar; + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/proc-macro.rs b/collector/compile-benchmarks/cargo/tests/proc-macro.rs new file mode 100644 index 000000000..953523ac6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/proc-macro.rs @@ -0,0 +1,281 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::is_nightly; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn probe_cfg_before_crate_type_discovery() { + if !is_nightly() { + return; + } + + let client = project("client") + .file("Cargo.toml", r#" + [package] + name = "client" + version = "0.0.1" + authors = [] + + [target.'cfg(not(stage300))'.dependencies.noop] + path = "../noop" + "#) + .file("src/main.rs", r#" + #![feature(proc_macro)] + + #[macro_use] + extern crate noop; + + #[derive(Noop)] + struct X; + + fn main() {} + "#) + .build(); + let _noop = project("noop") + .file("Cargo.toml", r#" + [package] + name = "noop" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#) + .file("src/lib.rs", r#" + #![feature(proc_macro, proc_macro_lib)] + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#) + .build(); + + assert_that(client.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn noop() { + if !is_nightly() { + return; + } + + let client = project("client") + .file("Cargo.toml", r#" + [package] + name = "client" + version = "0.0.1" + authors = [] + + [dependencies.noop] + path = "../noop" + "#) + .file("src/main.rs", r#" + #![feature(proc_macro)] + + #[macro_use] + extern crate noop; + + #[derive(Noop)] + struct X; + + fn main() {} + "#) + .build(); + let _noop = project("noop") + .file("Cargo.toml", r#" + [package] + name = "noop" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#) + .file("src/lib.rs", r#" + #![feature(proc_macro, proc_macro_lib)] + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream { + 
"".parse().unwrap() + } + "#) + .build(); + + assert_that(client.cargo("build"), + execs().with_status(0)); + assert_that(client.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn impl_and_derive() { + if !is_nightly() { + return; + } + + let client = project("client") + .file("Cargo.toml", r#" + [package] + name = "client" + version = "0.0.1" + authors = [] + + [dependencies.transmogrify] + path = "../transmogrify" + "#) + .file("src/main.rs", r#" + #![feature(proc_macro)] + + #[macro_use] + extern crate transmogrify; + + trait ImplByTransmogrify { + fn impl_by_transmogrify(&self) -> bool; + } + + #[derive(Transmogrify, Debug)] + struct X { success: bool } + + fn main() { + let x = X::new(); + assert!(x.impl_by_transmogrify()); + println!("{:?}", x); + } + "#) + .build(); + let _transmogrify = project("transmogrify") + .file("Cargo.toml", r#" + [package] + name = "transmogrify" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#) + .file("src/lib.rs", r#" + #![feature(proc_macro, proc_macro_lib)] + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Transmogrify)] + #[doc(hidden)] + pub fn transmogrify(input: TokenStream) -> TokenStream { + " + impl X { + fn new() -> Self { + X { success: true } + } + } + + impl ImplByTransmogrify for X { + fn impl_by_transmogrify(&self) -> bool { + true + } + } + ".parse().unwrap() + } + "#) + .build(); + + assert_that(client.cargo("build"), + execs().with_status(0)); + assert_that(client.cargo("run"), + execs().with_status(0).with_stdout("X { success: true }")); +} + +#[test] +fn plugin_and_proc_macro() { + if !is_nightly() { + return; + } + + let questionable = project("questionable") + .file("Cargo.toml", r#" + [package] + name = "questionable" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + proc-macro = true + "#) + .file("src/lib.rs", r#" + #![feature(plugin_registrar, rustc_private)] + #![feature(proc_macro, proc_macro_lib)] + + extern crate rustc_plugin; + use rustc_plugin::Registry; + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[plugin_registrar] + pub fn plugin_registrar(reg: &mut Registry) {} + + #[proc_macro_derive(Questionable)] + pub fn questionable(input: TokenStream) -> TokenStream { + input + } + "#) + .build(); + + let msg = " lib.plugin and lib.proc-macro cannot both be true"; + assert_that(questionable.cargo("build"), + execs().with_status(101).with_stderr_contains(msg)); +} + +#[test] +fn proc_macro_doctest() { + if !is_nightly() { + return + } + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + [lib] + proc-macro = true + "#) + .file("src/lib.rs", r#" +#![feature(proc_macro, proc_macro_lib)] +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +/// ``` +/// assert!(true); +/// ``` +#[proc_macro_derive(Bar)] +pub fn derive(_input: TokenStream) -> TokenStream { + "".parse().unwrap() +} + +#[test] +fn a() { + assert!(true); +} +"#) + .build(); + + assert_that(foo.cargo("test"), + execs().with_status(0) + .with_stdout_contains("test a ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 2)); +} diff --git a/collector/compile-benchmarks/cargo/tests/profiles.rs b/collector/compile-benchmarks/cargo/tests/profiles.rs new file mode 100644 index 000000000..b051f1147 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/profiles.rs @@ -0,0 +1,286 @@ +extern crate cargotest; +extern crate hamcrest; + +use std::env; + +use cargotest::is_nightly; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn profile_overrides() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = 1 + debug = false + rpath = true + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=1 \ + -C debug-assertions=on \ + -C metadata=[..] \ + -C rpath \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [optimized] target(s) in [..] +", +dir = p.root().display(), +url = p.url(), +))); +} + +#[test] +fn opt_level_override_0() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = 0 + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] [..] target(s) in [..] +", +dir = p.root().display(), +url = p.url() +))); +} + +#[test] +fn debug_override_1() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + debug = 1 + "#) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C debuginfo=1 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] [..] target(s) in [..] +", +dir = p.root().display(), +url = p.url() +))); +} + +fn check_opt_level_override(profile_level: &str, rustc_level: &str) { + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = {level} + "#, level = profile_level)) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level={level} \ + -C debuginfo=2 \ + -C debug-assertions=on \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] [..] target(s) in [..] 
+", +dir = p.root().display(), +url = p.url(), +level = rustc_level +))); +} + +#[test] +fn opt_level_overrides() { + if !is_nightly() { return } + + for &(profile_level, rustc_level) in &[ + ("1", "1"), + ("2", "2"), + ("3", "3"), + ("\"s\"", "s"), + ("\"z\"", "z"), + ] { + check_opt_level_override(profile_level, rustc_level) + } +} + +#[test] +fn top_level_overrides_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.release] + opt-level = 1 + debug = true + + [dependencies.foo] + path = "foo" + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [package] + + name = "foo" + version = "0.0.0" + authors = [] + + [profile.release] + opt-level = 0 + debug = false + + [lib] + name = "foo" + crate_type = ["dylib", "rlib"] + "#) + .file("foo/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] foo v0.0.0 ({url}/foo) +[RUNNING] `rustc --crate-name foo foo[/]src[/]lib.rs \ + --crate-type dylib --crate-type rlib \ + --emit=dep-info,link \ + -C prefer-dynamic \ + -C opt-level=1 \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]release[/]deps \ + -L dependency={dir}[/]target[/]release[/]deps` +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=1 \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]release[/]deps \ + --extern foo={dir}[/]target[/]release[/]deps[/]\ + {prefix}foo[..]{suffix} \ + --extern foo={dir}[/]target[/]release[/]deps[/]libfoo.rlib` +[FINISHED] release [optimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX))); +} + +#[test] +fn profile_in_non_root_manifest_triggers_a_warning() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + + [profile.dev] + debug = false + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + + [profile.dev] + opt-level = 1 + "#) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").cwd(p.root().join("bar")).arg("-v"), + execs().with_status(0).with_stderr("\ +[WARNING] profiles for the non root package will be ignored, specify profiles at the workspace root: +package: [..] +workspace: [..] +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized] target(s) in [..]")); +} + +#[test] +fn profile_in_virtual_manifest_works() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["bar"] + + [profile.dev] + opt-level = 1 + debug = false + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." 
+ "#) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").cwd(p.root().join("bar")).arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [optimized] target(s) in [..]")); +} diff --git a/collector/compile-benchmarks/cargo/tests/publish.rs b/collector/compile-benchmarks/cargo/tests/publish.rs new file mode 100644 index 000000000..d63087fe2 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/publish.rs @@ -0,0 +1,502 @@ +extern crate cargotest; +extern crate flate2; +extern crate hamcrest; +extern crate tar; + +use std::io::prelude::*; +use std::fs::File; +use std::io::SeekFrom; + +use cargotest::support::git::repo; +use cargotest::support::paths; +use cargotest::support::{project, execs, publish}; +use flate2::read::GzDecoder; +use hamcrest::assert_that; +use tar::Archive; + +#[test] +fn simple() { + publish::setup(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish").arg("--no-verify") + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `{reg}` +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[UPLOADING] foo v0.0.1 ({dir}) +", + dir = p.url(), + reg = publish::registry()))); + + let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap(); + // Skip the metadata payload and the size of the tarball + let mut sz = [0; 4]; + assert_eq!(f.read(&mut sz).unwrap(), 4); + let sz = ((sz[0] as u32) << 0) | + ((sz[1] as u32) << 8) | + ((sz[2] as u32) << 16) | + ((sz[3] as u32) << 24); + f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); + + // Verify the tarball + let mut rdr = GzDecoder::new(f).unwrap(); + assert_eq!(rdr.header().filename().unwrap(), b"foo-0.0.1.crate"); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for file in ar.entries().unwrap() { + let file = file.unwrap(); + let fname = file.header().path_bytes(); + let fname = &*fname; + assert!(fname == b"foo-0.0.1/Cargo.toml" || + fname == b"foo-0.0.1/Cargo.toml.orig" || + fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", file.header().path()); + } +} + +// TODO: Deprecated +// remove once it has been decided --host can be removed +#[test] +fn simple_with_host() { + publish::setup(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish").arg("--no-verify") + .arg("--host").arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!("\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index to which to publish. Please +use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] registry `{reg}` +[WARNING] manifest has no documentation, [..] +See [..] 
+[PACKAGING] foo v0.0.1 ({dir}) +[UPLOADING] foo v0.0.1 ({dir}) +", + dir = p.url(), + reg = publish::registry()))); + + let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap(); + // Skip the metadata payload and the size of the tarball + let mut sz = [0; 4]; + assert_eq!(f.read(&mut sz).unwrap(), 4); + let sz = ((sz[0] as u32) << 0) | + ((sz[1] as u32) << 8) | + ((sz[2] as u32) << 16) | + ((sz[3] as u32) << 24); + f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); + + // Verify the tarball + let mut rdr = GzDecoder::new(f).unwrap(); + assert_eq!(rdr.header().filename().unwrap(), "foo-0.0.1.crate".as_bytes()); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for file in ar.entries().unwrap() { + let file = file.unwrap(); + let fname = file.header().path_bytes(); + let fname = &*fname; + assert!(fname == b"foo-0.0.1/Cargo.toml" || + fname == b"foo-0.0.1/Cargo.toml.orig" || + fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", file.header().path()); + } +} + +// TODO: Deprecated +// remove once it has been decided --host can be removed +#[test] +fn simple_with_index_and_host() { + publish::setup(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish").arg("--no-verify") + .arg("--index").arg(publish::registry().to_string()) + .arg("--host").arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!("\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index to which to publish. Please +use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] registry `{reg}` +[WARNING] manifest has no documentation, [..] +See [..] 
+[PACKAGING] foo v0.0.1 ({dir}) +[UPLOADING] foo v0.0.1 ({dir}) +", + dir = p.url(), + reg = publish::registry()))); + + let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap(); + // Skip the metadata payload and the size of the tarball + let mut sz = [0; 4]; + assert_eq!(f.read(&mut sz).unwrap(), 4); + let sz = ((sz[0] as u32) << 0) | + ((sz[1] as u32) << 8) | + ((sz[2] as u32) << 16) | + ((sz[3] as u32) << 24); + f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); + + // Verify the tarball + let mut rdr = GzDecoder::new(f).unwrap(); + assert_eq!(rdr.header().filename().unwrap(), "foo-0.0.1.crate".as_bytes()); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for file in ar.entries().unwrap() { + let file = file.unwrap(); + let fname = file.header().path_bytes(); + let fname = &*fname; + assert!(fname == b"foo-0.0.1/Cargo.toml" || + fname == b"foo-0.0.1/Cargo.toml.orig" || + fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", file.header().path()); + } +} + +#[test] +fn git_deps() { + publish::setup(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.foo] + git = "git://path/to/nowhere" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish").arg("-v").arg("--no-verify") + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[ERROR] crates cannot be published to crates.io with dependencies sourced from \ +a repository\neither publish `foo` as its own crate on crates.io and \ +specify a crates.io version as a dependency or pull it into this \ +repository and specify it with a path and version\n\ +(crate `foo` has repository path `git://path/to/nowhere`)\ +")); +} + +#[test] +fn path_dependency_no_version() { + publish::setup(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("publish") + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[ERROR] all path dependencies must have a version specified when publishing. +dependency `bar` does not specify a version +")); +} + +#[test] +fn unpublishable_crate() { + publish::setup(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = false + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish") + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(101).with_stderr("\ +[ERROR] some crates cannot be published. 
+`foo` is marked as unpublishable +")); +} + +#[test] +fn dont_publish_dirty() { + publish::setup(); + let p = project("foo") + .file("bar", "") + .build(); + + let _ = repo(&paths::root().join("foo")) + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish") + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(101).with_stderr("\ +[UPDATING] registry `[..]` +error: 1 files in the working directory contain changes that were not yet \ +committed into git: + +bar + +to proceed despite this, pass the `--allow-dirty` flag +")); +} + +#[test] +fn publish_clean() { + publish::setup(); + + let p = project("foo").build(); + + let _ = repo(&paths::root().join("foo")) + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish") + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(0)); +} + +#[test] +fn publish_in_sub_repo() { + publish::setup(); + + let p = project("foo") + .file("baz", "") + .build(); + + let _ = repo(&paths::root().join("foo")) + .file("bar/Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish").cwd(p.root().join("bar")) + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(0)); +} + +#[test] +fn publish_when_ignored() { + publish::setup(); + + let p = project("foo") + .file("baz", "") + .build(); + + let _ = repo(&paths::root().join("foo")) + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#) + .file("src/main.rs", "fn main() {}") + .file(".gitignore", "baz") + .build(); + + assert_that(p.cargo("publish") + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(0)); +} + +#[test] +fn ignore_when_crate_ignored() { + publish::setup(); + + let p = project("foo") + .file("bar/baz", "") + .build(); + + let _ = repo(&paths::root().join("foo")) + .file(".gitignore", "bar") + .nocommit_file("bar/Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#) + .nocommit_file("bar/src/main.rs", "fn main() {}"); + assert_that(p.cargo("publish").cwd(p.root().join("bar")) + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(0)); +} + +#[test] +fn new_crate_rejected() { + publish::setup(); + + let p = project("foo") + .file("baz", "") + .build(); + + let _ = repo(&paths::root().join("foo")) + .nocommit_file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#) + .nocommit_file("src/main.rs", "fn main() {}"); + assert_that(p.cargo("publish") + .arg("--index").arg(publish::registry().to_string()), + 
execs().with_status(101)); +} + +#[test] +fn dry_run() { + publish::setup(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("publish").arg("--dry-run") + .arg("--index").arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `[..]` +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[UPLOADING] foo v0.0.1 ({dir}) +[WARNING] aborting upload due to dry run +", + dir = p.url()))); + + // Ensure the API request wasn't actually made + assert!(!publish::upload_path().join("api/v1/crates/new").exists()); +} diff --git a/collector/compile-benchmarks/cargo/tests/read-manifest.rs b/collector/compile-benchmarks/cargo/tests/read-manifest.rs new file mode 100644 index 000000000..41ed14204 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/read-manifest.rs @@ -0,0 +1,96 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::{project, execs, main_file, basic_bin_manifest}; +use hamcrest::{assert_that}; + +static MANIFEST_OUTPUT: &'static str = r#" +{ + "name":"foo", + "version":"0.5.0", + "id":"foo[..]0.5.0[..](path+file://[..]/foo)", + "license": null, + "license_file": null, + "description": null, + "source":null, + "dependencies":[], + "targets":[{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..][/]foo[/]src[/]foo.rs" + }], + "features":{}, + "manifest_path":"[..]Cargo.toml" +}"#; + +#[test] +fn cargo_read_manifest_path_to_cargo_toml_relative() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("read-manifest") + .arg("--manifest-path").arg("foo/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs().with_status(0) + .with_json(MANIFEST_OUTPUT)); +} + +#[test] +fn cargo_read_manifest_path_to_cargo_toml_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("read-manifest") + .arg("--manifest-path").arg(p.root().join("Cargo.toml")) + .cwd(p.root().parent().unwrap()), + execs().with_status(0) + .with_json(MANIFEST_OUTPUT)); +} + +#[test] +fn cargo_read_manifest_path_to_cargo_toml_parent_relative() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("read-manifest") + .arg("--manifest-path").arg("foo") + .cwd(p.root().parent().unwrap()), + execs().with_status(101) + .with_stderr("[ERROR] the manifest-path must be \ + a path to a Cargo.toml file")); +} + +#[test] +fn cargo_read_manifest_path_to_cargo_toml_parent_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("read-manifest") + .arg("--manifest-path").arg(p.root()) + .cwd(p.root().parent().unwrap()), + execs().with_status(101) + .with_stderr("[ERROR] the manifest-path must be \ + a path to a Cargo.toml file")); +} + +#[test] +fn cargo_read_manifest_cwd() { + let p = project("foo") + .file("Cargo.toml", 
&basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("read-manifest") + .cwd(p.root()), + execs().with_status(0) + .with_json(MANIFEST_OUTPUT)); +} diff --git a/collector/compile-benchmarks/cargo/tests/registry.rs b/collector/compile-benchmarks/cargo/tests/registry.rs new file mode 100644 index 000000000..6334944d7 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/registry.rs @@ -0,0 +1,1453 @@ +#[macro_use] +extern crate cargotest; +extern crate hamcrest; +extern crate url; + +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::PathBuf; + +use cargotest::cargo_process; +use cargotest::support::git; +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::registry::{self, Package}; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; +use url::Url; + +fn registry_path() -> PathBuf { paths::root().join("registry") } +fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } + +#[test] +fn simple() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = ">= 0.0.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `{reg}` +[DOWNLOADING] bar v0.0.1 (registry file://[..]) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + reg = registry::registry()))); + + assert_that(p.cargo("clean"), execs().with_status(0)); + + // Don't download a second time + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url()))); +} + +#[test] +fn deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = ">= 0.0.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `{reg}` +[DOWNLOADING] [..] v0.0.1 (registry file://[..]) +[DOWNLOADING] [..] v0.0.1 (registry file://[..]) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + reg = registry::registry()))); +} + +#[test] +fn nonexistent() { + Package::new("init", "0.0.1").publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + nonexistent = ">= 0.0.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[ERROR] no matching package named `nonexistent` found (required by `foo`) +location searched: registry [..] 
+version required: >= 0.0.0 +")); +} + +#[test] +fn wrong_version() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + foo = ">= 1.0.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.0.1").publish(); + Package::new("foo", "0.0.2").publish(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr_contains("\ +[ERROR] no matching version `>= 1.0.0` found for package `foo` (required by `foo`) +location searched: registry [..] +versions found: 0.0.2, 0.0.1 +")); + + Package::new("foo", "0.0.3").publish(); + Package::new("foo", "0.0.4").publish(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr_contains("\ +[ERROR] no matching version `>= 1.0.0` found for package `foo` (required by `foo`) +location searched: registry [..] +versions found: 0.0.4, 0.0.3, 0.0.2, ... +")); +} + +#[test] +fn bad_cksum() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bad-cksum = ">= 0.0.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + let pkg = Package::new("bad-cksum", "0.0.1"); + pkg.publish(); + t!(File::create(&pkg.archive_dst())); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry [..] +[DOWNLOADING] bad-cksum [..] +[ERROR] unable to get packages from source + +Caused by: + failed to download replaced source `registry https://[..]` + +Caused by: + failed to verify the checksum of `bad-cksum v0.0.1 (registry file://[..])` +")); +} + +#[test] +fn update_registry() { + Package::new("init", "0.0.1").publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + notyet = ">= 0.0.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr_contains("\ +[ERROR] no matching package named `notyet` found (required by `foo`) +location searched: registry [..] +version required: >= 0.0.0 +")); + + Package::new("notyet", "0.0.1").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `{reg}` +[DOWNLOADING] notyet v0.0.1 (registry file://[..]) +[COMPILING] notyet v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + reg = registry::registry()))); +} + +#[test] +fn package_with_path_deps() { + Package::new("init", "0.0.1").publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + + [dependencies.notyet] + version = "0.0.1" + path = "notyet" + "#) + .file("src/main.rs", "fn main() {}") + .file("notyet/Cargo.toml", r#" + [package] + name = "notyet" + version = "0.0.1" + authors = [] + "#) + .file("notyet/src/lib.rs", "") + .build(); + + assert_that(p.cargo("package").arg("-v"), + execs().with_status(101).with_stderr_contains("\ +[ERROR] failed to verify package tarball + +Caused by: + no matching package named `notyet` found (required by `foo`) +location searched: registry [..] 
+version required: ^0.0.1 +")); + + Package::new("notyet", "0.0.1").publish(); + + assert_that(p.cargo("package"), + execs().with_status(0).with_stderr(format!("\ +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[UPDATING] registry `[..]` +[DOWNLOADING] notyet v0.0.1 (registry file://[..]) +[COMPILING] notyet v0.0.1 +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", dir = p.url()))); +} + +#[test] +fn lockfile_locks() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.0.1 (registry file://[..]) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url()))); + + p.root().move_into_the_past(); + Package::new("bar", "0.0.2").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn lockfile_locks_transitively() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `[..]` +[DOWNLOADING] [..] v0.0.1 (registry file://[..]) +[DOWNLOADING] [..] v0.0.1 (registry file://[..]) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url()))); + + p.root().move_into_the_past(); + Package::new("baz", "0.0.2").publish(); + Package::new("bar", "0.0.2").dep("baz", "*").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn yanks_are_not_used() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("baz", "0.0.2").yanked(true).publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + Package::new("bar", "0.0.2").dep("baz", "*").yanked(true).publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `[..]` +[DOWNLOADING] [..] v0.0.1 (registry file://[..]) +[DOWNLOADING] [..] v0.0.1 (registry file://[..]) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
secs +", + dir = p.url()))); +} + +#[test] +fn relying_on_a_yank_is_bad() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("baz", "0.0.2").yanked(true).publish(); + Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr_contains("\ +[ERROR] no matching version `= 0.0.2` found for package `baz` (required by `bar`) +location searched: registry [..] +versions found: 0.0.1 +")); +} + +#[test] +fn yanks_in_lockfiles_are_ok() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + registry::registry_path().join("3").rm_rf(); + + Package::new("bar", "0.0.1").yanked(true).publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); + + assert_that(p.cargo("update"), + execs().with_status(101).with_stderr_contains("\ +[ERROR] no matching package named `bar` found (required by `foo`) +location searched: registry [..] +version required: * +")); +} + +#[test] +fn update_with_lockfile_if_packages_missing() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + assert_that(p.cargo("build"), + execs().with_status(0)); + p.root().move_into_the_past(); + + paths::home().join(".cargo/registry").rm_rf(); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.0.1 (registry file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +")); +} + +#[test] +fn update_lockfile() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + println!("0.0.1"); + Package::new("bar", "0.0.1").publish(); + assert_that(p.cargo("build"), + execs().with_status(0)); + + Package::new("bar", "0.0.2").publish(); + Package::new("bar", "0.0.3").publish(); + paths::home().join(".cargo/registry").rm_rf(); + println!("0.0.2 update"); + assert_that(p.cargo("update") + .arg("-p").arg("bar").arg("--precise").arg("0.0.2"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `[..]` +[UPDATING] bar v0.0.1 -> v0.0.2 +")); + + println!("0.0.2 build"); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[DOWNLOADING] [..] v0.0.2 (registry file://[..]) +[COMPILING] bar v0.0.2 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url()))); + + println!("0.0.3 update"); + assert_that(p.cargo("update") + .arg("-p").arg("bar"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `[..]` +[UPDATING] bar v0.0.2 -> v0.0.3 +")); + + println!("0.0.3 build"); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[DOWNLOADING] [..] 
v0.0.3 (registry file://[..])
+[COMPILING] bar v0.0.3
+[COMPILING] foo v0.0.1 ({dir})
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
+",
+                       dir = p.url())));
+
+    println!("new dependencies update");
+    Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish();
+    Package::new("spam", "0.2.5").publish();
+    assert_that(p.cargo("update")
+                 .arg("-p").arg("bar"),
+                execs().with_status(0).with_stderr("\
+[UPDATING] registry `[..]`
+[UPDATING] bar v0.0.3 -> v0.0.4
+[ADDING] spam v0.2.5
+"));
+
+    println!("removed dependencies update");
+    Package::new("bar", "0.0.5").publish();
+    assert_that(p.cargo("update")
+                 .arg("-p").arg("bar"),
+                execs().with_status(0).with_stderr("\
+[UPDATING] registry `[..]`
+[UPDATING] bar v0.0.4 -> v0.0.5
+[REMOVING] spam v0.2.5
+"));
+}
+
+#[test]
+fn dev_dependency_not_used() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").publish();
+    Package::new("bar", "0.0.1").dev_dep("baz", "*").publish();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(0).with_stderr(&format!("\
+[UPDATING] registry `[..]`
+[DOWNLOADING] [..] v0.0.1 (registry file://[..])
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ({dir})
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
+",
+                    dir = p.url())));
+}
+
+#[test]
+fn login_with_no_cargo_dir() {
+    let home = paths::home().join("new-home");
+    t!(fs::create_dir(&home));
+    assert_that(cargo_process().arg("login").arg("foo").arg("-v"),
+                execs().with_status(0));
+}
+
+#[test]
+fn login_with_differently_sized_token() {
+    // Verify that the configuration file gets properly truncated.
+    let home = paths::home().join("new-home");
+    t!(fs::create_dir(&home));
+    assert_that(cargo_process().arg("login").arg("lmaolmaolmao").arg("-v"),
+                execs().with_status(0));
+    assert_that(cargo_process().arg("login").arg("lmao").arg("-v"),
+                execs().with_status(0));
+    assert_that(cargo_process().arg("login").arg("lmaolmaolmao").arg("-v"),
+                execs().with_status(0));
+}
+
+#[test]
+fn bad_license_file() {
+    Package::new("foo", "1.0.0").publish();
+    let p = project("all")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license-file = "foo"
+            description = "bar"
+            repository = "baz"
+        "#)
+        .file("src/main.rs", r#"
+            fn main() {}
+        "#)
+        .build();
+    assert_that(p.cargo("publish")
+                 .arg("-v")
+                 .arg("--index").arg(registry().to_string()),
+                execs().with_status(101)
+                       .with_stderr_contains("\
+[ERROR] the license file `foo` does not exist"));
+}
+
+#[test]
+fn updating_a_dep() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.a]
+            path = "a"
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("a/Cargo.toml", r#"
+            [project]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#)
+        .file("a/src/lib.rs", "")
+        .build();
+
+    Package::new("bar", "0.0.1").publish();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(0).with_stderr(&format!("\
+[UPDATING] registry `[..]`
+[DOWNLOADING] bar v0.0.1 (registry file://[..])
+[COMPILING] bar v0.0.1
+[COMPILING] a v0.0.1 ({dir}/a)
+[COMPILING] foo v0.0.1 ({dir})
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
secs +", + dir = p.url()))); + + t!(t!(File::create(&p.root().join("a/Cargo.toml"))).write_all(br#" + [project] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#)); + Package::new("bar", "0.1.0").publish(); + + println!("second"); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.1.0 (registry file://[..]) +[COMPILING] bar v0.1.0 +[COMPILING] a v0.0.1 ({dir}/a) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url()))); +} + +#[test] +fn git_and_registry_dep() { + let b = git::repo(&paths::root().join("b")) + .file("Cargo.toml", r#" + [project] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + a = "0.0.1" + "#) + .file("src/lib.rs", "") + .build(); + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = "0.0.1" + + [dependencies.b] + git = '{}' + "#, b.url())) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.0.1").publish(); + + p.root().move_into_the_past(); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] [..] +[UPDATING] [..] +[DOWNLOADING] a v0.0.1 (registry file://[..]) +[COMPILING] a v0.0.1 +[COMPILING] b v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url()))); + p.root().move_into_the_past(); + + println!("second"); + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn update_publish_then_update() { + // First generate a Cargo.lock and a clone of the registry index at the + // "head" of the current registry. + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + Package::new("a", "0.1.0").publish(); + assert_that(p.cargo("build"), + execs().with_status(0)); + + // Next, publish a new package and back up the copy of the registry we just + // created. + Package::new("a", "0.1.1").publish(); + let registry = paths::home().join(".cargo/registry"); + let backup = paths::root().join("registry-backup"); + t!(fs::rename(®istry, &backup)); + + // Generate a Cargo.lock with the newer version, and then move the old copy + // of the registry back into place. + let p2 = project("foo2") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.1" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that(p2.cargo("build"), + execs().with_status(0)); + registry.rm_rf(); + t!(fs::rename(&backup, ®istry)); + t!(fs::rename(p2.root().join("Cargo.lock"), p.root().join("Cargo.lock"))); + + // Finally, build the first project again (with our newer Cargo.lock) which + // should force an update of the old registry, download the new crate, and + // then build everything again. + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr(&format!("\ +[UPDATING] [..] +[DOWNLOADING] a v0.1.1 (registry file://[..]) +[COMPILING] a v0.1.1 +[COMPILING] foo v0.5.0 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
secs +", + dir = p.url()))); + +} + +#[test] +fn fetch_downloads() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").publish(); + + assert_that(p.cargo("fetch"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] a v0.1.0 (registry [..]) +")); +} + +#[test] +fn update_transitive_dependency() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").dep("b", "*").publish(); + Package::new("b", "0.1.0").publish(); + + assert_that(p.cargo("fetch"), + execs().with_status(0)); + + Package::new("b", "0.1.1").publish(); + + assert_that(p.cargo("update").arg("-pb"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[UPDATING] b v0.1.0 -> v0.1.1 +")); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[DOWNLOADING] b v0.1.1 (registry file://[..]) +[COMPILING] b v0.1.1 +[COMPILING] a v0.1.0 +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +")); +} + +#[test] +fn update_backtracking_ok() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + webdriver = "0.1" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("webdriver", "0.1.0").dep("hyper", "0.6").publish(); + Package::new("hyper", "0.6.5").dep("openssl", "0.1") + .dep("cookie", "0.1") + .publish(); + Package::new("cookie", "0.1.0").dep("openssl", "0.1").publish(); + Package::new("openssl", "0.1.0").publish(); + + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0)); + + Package::new("openssl", "0.1.1").publish(); + Package::new("hyper", "0.6.6").dep("openssl", "0.1.1") + .dep("cookie", "0.1.0") + .publish(); + + assert_that(p.cargo("update").arg("-p").arg("hyper"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +")); +} + +#[test] +fn update_multiple_packages() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "*" + b = "*" + c = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").publish(); + Package::new("b", "0.1.0").publish(); + Package::new("c", "0.1.0").publish(); + + assert_that(p.cargo("fetch"), + execs().with_status(0)); + + Package::new("a", "0.1.1").publish(); + Package::new("b", "0.1.1").publish(); + Package::new("c", "0.1.1").publish(); + + assert_that(p.cargo("update").arg("-pa").arg("-pb"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[UPDATING] a v0.1.0 -> v0.1.1 +[UPDATING] b v0.1.0 -> v0.1.1 +")); + + assert_that(p.cargo("update").arg("-pb").arg("-pc"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[UPDATING] c v0.1.0 -> v0.1.1 +")); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr_contains("\ +[DOWNLOADING] a v0.1.1 (registry file://[..])") + .with_stderr_contains("\ +[DOWNLOADING] b v0.1.1 (registry file://[..])") + .with_stderr_contains("\ +[DOWNLOADING] c v0.1.1 (registry file://[..])") + .with_stderr_contains("\ +[COMPILING] a v0.1.1") + .with_stderr_contains("\ +[COMPILING] b v0.1.1") + 
.with_stderr_contains("\ +[COMPILING] c v0.1.1") + .with_stderr_contains("\ +[COMPILING] foo v0.5.0 ([..])")); +} + +#[test] +fn bundled_crate_in_registry() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "0.1" + baz = "0.1" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0") + .dep("bar", "0.1.0") + .file("Cargo.toml", r#" + [package] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar", version = "0.1.0" } + "#) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .publish(); + + assert_that(p.cargo("run"), execs().with_status(0)); +} + +#[test] +fn update_same_prefix_oh_my_how_was_this_a_bug() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "ugh" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.1" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foobar", "0.2.0").publish(); + Package::new("foo", "0.1.0") + .dep("foobar", "0.2.0") + .publish(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + assert_that(p.cargo("update").arg("-pfoobar").arg("--precise=0.2.0"), + execs().with_status(0)); +} + +#[test] +fn use_semver() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "1.2.3-alpha.0" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "1.2.3-alpha.0").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn only_download_relevant() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [target.foo.dependencies] + foo = "*" + [dev-dependencies] + bar = "*" + [dependencies] + baz = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] baz v0.1.0 ([..]) +[COMPILING] baz v0.1.0 +[COMPILING] bar v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +")); +} + +#[test] +fn resolve_and_backtracking() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.1") + .feature_dep("bar", "0.1", &["a", "b"]) + .publish(); + Package::new("foo", "0.1.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn upstream_warnings_on_extra_verbose() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.0") + .file("src/lib.rs", "fn unused() {}") + .publish(); + + assert_that(p.cargo("build").arg("-vv"), + execs().with_status(0).with_stderr_contains("\ +[..]warning: function is never used[..] 
+")); +} + +#[test] +fn disallow_network() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--frozen"), + execs().with_status(101).with_stderr("\ +error: failed to load source for a dependency on `foo` + +Caused by: + Unable to update registry [..] + +Caused by: + attempting to make an HTTP request, but --frozen was specified +")); +} + +#[test] +fn add_dep_dont_update_registry() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#) + .file("src/main.rs", "fn main() {}") + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#) + .file("baz/src/lib.rs", "") + .build(); + + Package::new("remote", "0.3.4").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + remote = "0.3" + "#)); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] bar v0.5.0 ([..]) +[FINISHED] [..] +")); +} + +#[test] +fn bump_version_dont_update_registry() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#) + .file("src/main.rs", "fn main() {}") + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#) + .file("baz/src/lib.rs", "") + .build(); + + Package::new("remote", "0.3.4").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#" + [project] + name = "bar" + version = "0.6.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#)); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] bar v0.6.0 ([..]) +[FINISHED] [..] +")); +} + +#[test] +fn old_version_req() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.2*" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("remote", "0.2.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of bar 0.5.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of bar 0.5.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +[UPDATING] [..] +[DOWNLOADING] [..] +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] 
+")); +} + +#[test] +fn old_version_req_upstream() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("remote", "0.3.0") + .file("Cargo.toml", r#" + [project] + name = "remote" + version = "0.3.0" + authors = [] + + [dependencies] + bar = "0.2*" + "#) + .file("src/lib.rs", "") + .publish(); + Package::new("bar", "0.2.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] [..] +[DOWNLOADING] [..] +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of remote 0.3.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] +")); +} + +#[test] +fn toml_lies_but_index_is_truth() { + Package::new("foo", "0.2.0").publish(); + Package::new("bar", "0.3.0") + .dep("foo", "0.2.0") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.3.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", "extern crate foo;") + .publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "0.3" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn vv_prints_warnings() { + Package::new("foo", "0.2.0") + .file("src/lib.rs", r#" + #![deny(warnings)] + + fn foo() {} // unused function + "#) + .publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "fo" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.2" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-vv"), + execs().with_status(0)); +} + +#[test] +fn bad_and_or_malicious_packages_rejected() { + Package::new("foo", "0.2.0") + .extra_file("foo-0.1.0/src/lib.rs", "") + .publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "fo" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.2" + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-vv"), + execs().with_status(101) + .with_stderr("\ +[UPDATING] [..] +[DOWNLOADING] [..] +error: unable to get packages from source + +Caused by: + failed to download [..] + +Caused by: + failed to unpack [..] + +Caused by: + [..] 
contains a file at \"foo-0.1.0/src/lib.rs\" which isn't under \"foo-0.2.0\" +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/required-features.rs b/collector/compile-benchmarks/cargo/tests/required-features.rs new file mode 100644 index 000000000..0f71d6d93 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/required-features.rs @@ -0,0 +1,1000 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::is_nightly; +use cargotest::install::{cargo_home, has_installed_exe}; +use cargotest::support::{project, execs}; +use hamcrest::{assert_that, existing_file, not}; + +#[test] +fn build_bin_default_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#) + .file("src/main.rs", r#" + extern crate foo; + + #[cfg(feature = "a")] + fn test() { + foo::foo(); + } + + fn main() {} + "#) + .file("src/lib.rs", r#" + #[cfg(feature = "a")] + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(p.cargo("build").arg("--no-default-features"), + execs().with_status(0)); + + assert_that(p.cargo("build").arg("--bin=foo"), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(p.cargo("build").arg("--bin=foo").arg("--no-default-features"), + execs().with_status(101).with_stderr("\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +")); +} + +#[test] +fn build_bin_arg_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("a"), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn build_bin_multiple_required_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bin]] + name = "foo_1" + path = "src/foo_1.rs" + required-features = ["b", "c"] + + [[bin]] + name = "foo_2" + path = "src/foo_2.rs" + required-features = ["a"] + "#) + .file("src/foo_1.rs", "fn main() {}") + .file("src/foo_2.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + assert_that(&p.bin("foo_1"), not(existing_file())); + assert_that(&p.bin("foo_2"), existing_file()); + + assert_that(p.cargo("build").arg("--features").arg("c"), + execs().with_status(0)); + + assert_that(&p.bin("foo_1"), existing_file()); + assert_that(&p.bin("foo_2"), existing_file()); + + assert_that(p.cargo("build").arg("--no-default-features"), + execs().with_status(0)); +} + +#[test] +fn build_example_default_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[example]] + name = "foo" + required-features = ["a"] + "#) + .file("examples/foo.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--example=foo"), + execs().with_status(0)); + assert_that(&p.bin("examples/foo"), existing_file()); + + 
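// With default features disabled, feature `a` is not enabled, so the
+    // example's `required-features = ["a"]` cannot be satisfied and the
+    // build below is rejected.
+    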
assert_that(p.cargo("build").arg("--example=foo").arg("--no-default-features"), + execs().with_status(101).with_stderr("\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +")); +} + +#[test] +fn build_example_arg_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[example]] + name = "foo" + required-features = ["a"] + "#) + .file("examples/foo.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--example=foo").arg("--features").arg("a"), + execs().with_status(0)); + assert_that(&p.bin("examples/foo"), existing_file()); +} + +#[test] +fn build_example_multiple_required_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[example]] + name = "foo_1" + required-features = ["b", "c"] + + [[example]] + name = "foo_2" + required-features = ["a"] + "#) + .file("examples/foo_1.rs", "fn main() {}") + .file("examples/foo_2.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--example=foo_1"), + execs().with_status(101).with_stderr("\ +error: target `foo_1` requires the features: `b`, `c` +Consider enabling them by passing e.g. `--features=\"b c\"` +")); + assert_that(p.cargo("build").arg("--example=foo_2"), + execs().with_status(0)); + + assert_that(&p.bin("examples/foo_1"), not(existing_file())); + assert_that(&p.bin("examples/foo_2"), existing_file()); + + assert_that(p.cargo("build").arg("--example=foo_1") + .arg("--features").arg("c"), + execs().with_status(0)); + assert_that(p.cargo("build").arg("--example=foo_2") + .arg("--features").arg("c"), + execs().with_status(0)); + + assert_that(&p.bin("examples/foo_1"), existing_file()); + assert_that(&p.bin("examples/foo_2"), existing_file()); + + assert_that(p.cargo("build").arg("--example=foo_1") + .arg("--no-default-features"), + execs().with_status(101).with_stderr("\ +error: target `foo_1` requires the features: `b`, `c` +Consider enabling them by passing e.g. `--features=\"b c\"` +")); + assert_that(p.cargo("build").arg("--example=foo_2") + .arg("--no-default-features"), + execs().with_status(101).with_stderr("\ +error: target `foo_2` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +")); +} + +#[test] +fn test_default_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[test]] + name = "foo" + required-features = ["a"] + "#) + .file("tests/foo.rs", "#[test]\nfn test() {}") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test test ... ok")); + + assert_that(p.cargo("test").arg("--no-default-features"), + execs().with_status(0).with_stderr(format!("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")) + .with_stdout("")); + + assert_that(p.cargo("test").arg("--test=foo"), + execs().with_status(0).with_stderr(format!("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]")) + .with_stdout_contains("test test ... 
ok")); + + assert_that(p.cargo("test").arg("--test=foo").arg("--no-default-features"), + execs().with_status(101).with_stderr("\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +")); +} + +#[test] +fn test_arg_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[test]] + name = "foo" + required-features = ["a"] + "#) + .file("tests/foo.rs", "#[test]\nfn test() {}") + .build(); + + assert_that(p.cargo("test").arg("--features").arg("a"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test test ... ok")); +} + +#[test] +fn test_multiple_required_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[test]] + name = "foo_1" + required-features = ["b", "c"] + + [[test]] + name = "foo_2" + required-features = ["a"] + "#) + .file("tests/foo_1.rs", "#[test]\nfn test() {}") + .file("tests/foo_2.rs", "#[test]\nfn test() {}") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo_2-[..][EXE]", p.url())) + .with_stdout_contains("test test ... ok")); + + assert_that(p.cargo("test").arg("--features").arg("c"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo_1-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]foo_2-[..][EXE]", p.url())) + .with_stdout_contains_n("test test ... ok", 2)); + + assert_that(p.cargo("test").arg("--no-default-features"), + execs().with_status(0).with_stderr(format!("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")) + .with_stdout("")); +} + +#[test] +fn bench_default_features() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bench]] + name = "foo" + required-features = ["a"] + "#) + .file("benches/foo.rs", r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#) + .build(); + + assert_that(p.cargo("bench"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test bench ... bench: [..]")); + + assert_that(p.cargo("bench").arg("--no-default-features"), + execs().with_status(0).with_stderr(format!("\ +[FINISHED] release [optimized] target(s) in [..]")) + .with_stdout("")); + + assert_that(p.cargo("bench").arg("--bench=foo"), + execs().with_status(0).with_stderr(format!("\ +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")) + .with_stdout_contains("test bench ... bench: [..]")); + + assert_that(p.cargo("bench").arg("--bench=foo").arg("--no-default-features"), + execs().with_status(101).with_stderr("\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. 
`--features=\"a\"` +")); +} + +#[test] +fn bench_arg_features() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bench]] + name = "foo" + required-features = ["a"] + "#) + .file("benches/foo.rs", r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#) + .build(); + + assert_that(p.cargo("bench").arg("--features").arg("a"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test bench ... bench: [..]")); +} + +#[test] +fn bench_multiple_required_features() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bench]] + name = "foo_1" + required-features = ["b", "c"] + + [[bench]] + name = "foo_2" + required-features = ["a"] + "#) + .file("benches/foo_1.rs", r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#) + .file("benches/foo_2.rs", r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#) + .build(); + + assert_that(p.cargo("bench"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo_2-[..][EXE]", p.url())) + .with_stdout_contains("test bench ... bench: [..]")); + + assert_that(p.cargo("bench").arg("--features").arg("c"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo_1-[..][EXE] +[RUNNING] target[/]release[/]deps[/]foo_2-[..][EXE]", p.url())) + .with_stdout_contains_n("test bench ... bench: [..]", 2)); + + assert_that(p.cargo("bench").arg("--no-default-features"), + execs().with_status(0).with_stderr(format!("\ +[FINISHED] release [optimized] target(s) in [..]")) + .with_stdout("")); +} + +#[test] +fn install_default_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + + [[example]] + name = "foo" + required-features = ["a"] + "#) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("install"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), + execs().with_status(0)); + + assert_that(p.cargo("install").arg("--no-default-features"), + execs().with_status(101).with_stderr(format!("\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+[ERROR] no binaries are available for install using the selected features +"))); + assert_that(cargo_home(), not(has_installed_exe("foo"))); + + assert_that(p.cargo("install").arg("--bin=foo"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), + execs().with_status(0)); + + assert_that(p.cargo("install").arg("--bin=foo").arg("--no-default-features"), + execs().with_status(101).with_stderr(format!("\ +[INSTALLING] foo v0.0.1 ([..]) +[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ + `[..]target` + +Caused by: + target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +"))); + assert_that(cargo_home(), not(has_installed_exe("foo"))); + + assert_that(p.cargo("install").arg("--example=foo"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), + execs().with_status(0)); + + assert_that(p.cargo("install").arg("--example=foo").arg("--no-default-features"), + execs().with_status(101).with_stderr(format!("\ +[INSTALLING] foo v0.0.1 ([..]) +[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ + `[..]target` + +Caused by: + target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +"))); + assert_that(cargo_home(), not(has_installed_exe("foo"))); +} + +#[test] +fn install_arg_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("install").arg("--features").arg("a"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), + execs().with_status(0)); +} + +#[test] +fn install_multiple_required_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bin]] + name = "foo_1" + path = "src/foo_1.rs" + required-features = ["b", "c"] + + [[bin]] + name = "foo_2" + path = "src/foo_2.rs" + required-features = ["a"] + "#) + .file("src/foo_1.rs", "fn main() {}") + .file("src/foo_2.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("install"), + execs().with_status(0)); + assert_that(cargo_home(), not(has_installed_exe("foo_1"))); + assert_that(cargo_home(), has_installed_exe("foo_2")); + assert_that(p.cargo("uninstall").arg("foo"), + execs().with_status(0)); + + assert_that(p.cargo("install").arg("--features").arg("c"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo_1")); + assert_that(cargo_home(), has_installed_exe("foo_2")); + assert_that(p.cargo("uninstall").arg("foo"), + execs().with_status(0)); + + assert_that(p.cargo("install").arg("--no-default-features"), + execs().with_status(101).with_stderr("\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+[ERROR] no binaries are available for install using the selected features +")); + assert_that(cargo_home(), not(has_installed_exe("foo_1"))); + assert_that(cargo_home(), not(has_installed_exe("foo_2"))); +} + +#[test] +fn dep_feature_in_toml() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar", features = ["a"] } + + [[bin]] + name = "foo" + required-features = ["bar/a"] + + [[example]] + name = "foo" + required-features = ["bar/a"] + + [[test]] + name = "foo" + required-features = ["bar/a"] + + [[bench]] + name = "foo" + required-features = ["bar/a"] + "#) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .file("tests/foo.rs", "#[test]\nfn test() {}") + .file("benches/foo.rs", r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + a = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + // bin + assert_that(p.cargo("build").arg("--bin=foo"), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + // example + assert_that(p.cargo("build").arg("--example=foo"), + execs().with_status(0)); + assert_that(&p.bin("examples/foo"), existing_file()); + + // test + assert_that(p.cargo("test").arg("--test=foo"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test test ... ok")); + + // bench + if is_nightly() { + assert_that(p.cargo("bench").arg("--bench=foo"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] bar v0.0.1 ({0}/bar) +[COMPILING] foo v0.0.1 ({0}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test bench ... bench: [..]")); + } + + // install + assert_that(p.cargo("install"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), + execs().with_status(0)); +} + +#[test] +fn dep_feature_in_cmd_line() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [[bin]] + name = "foo" + required-features = ["bar/a"] + + [[example]] + name = "foo" + required-features = ["bar/a"] + + [[test]] + name = "foo" + required-features = ["bar/a"] + + [[bench]] + name = "foo" + required-features = ["bar/a"] + "#) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .file("tests/foo.rs", "#[test]\nfn test() {}") + .file("benches/foo.rs", r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + a = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + // bin + assert_that(p.cargo("build").arg("--bin=foo"), + execs().with_status(101).with_stderr("\ +error: target `foo` requires the features: `bar/a` +Consider enabling them by passing e.g. 
`--features=\"bar/a\"` +")); + + assert_that(p.cargo("build").arg("--bin=foo").arg("--features").arg("bar/a"), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + // example + assert_that(p.cargo("build").arg("--example=foo"), + execs().with_status(101).with_stderr("\ +error: target `foo` requires the features: `bar/a` +Consider enabling them by passing e.g. `--features=\"bar/a\"` +")); + + assert_that(p.cargo("build").arg("--example=foo").arg("--features").arg("bar/a"), + execs().with_status(0)); + assert_that(&p.bin("examples/foo"), existing_file()); + + // test + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr(format!("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")) + .with_stdout("")); + + assert_that(p.cargo("test").arg("--test=foo").arg("--features").arg("bar/a"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test test ... ok")); + + // bench + if is_nightly() { + assert_that(p.cargo("bench"), + execs().with_status(0).with_stderr(format!("\ +[FINISHED] release [optimized] target(s) in [..]")) + .with_stdout("")); + + assert_that(p.cargo("bench").arg("--bench=foo").arg("--features").arg("bar/a"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] bar v0.0.1 ({0}/bar) +[COMPILING] foo v0.0.1 ({0}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("test bench ... bench: [..]")); + } + + // install + assert_that(p.cargo("install"), + execs().with_status(101).with_stderr(format!("\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[ERROR] no binaries are available for install using the selected features +"))); + assert_that(cargo_home(), not(has_installed_exe("foo"))); + + assert_that(p.cargo("install").arg("--features").arg("bar/a"), + execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), + execs().with_status(0)); +} + +#[test] +fn test_skips_compiling_bin_with_missing_required_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bin]] + name = "bin_foo" + path = "src/bin/foo.rs" + required-features = ["a"] + "#) + .file("src/bin/foo.rs", "extern crate bar; fn main() {}") + .file("tests/foo.rs", "") + .file("benches/foo.rs", "") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url())) + .with_stdout_contains("running 0 tests")); + + assert_that(p.cargo("test").arg("--features").arg("a").arg("-j").arg("1"), + execs().with_status(101).with_stderr_contains(format!("\ +[COMPILING] foo v0.0.1 ({}) +error[E0463]: can't find crate for `bar`", p.url()))); + + if is_nightly() { + assert_that(p.cargo("bench"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] 
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
+                        .with_stdout_contains("running 0 tests"));
+
+        assert_that(p.cargo("bench").arg("--features").arg("a").arg("-j").arg("1"),
+                    execs().with_status(101).with_stderr_contains(format!("\
+[COMPILING] foo v0.0.1 ({})
+error[E0463]: can't find crate for `bar`", p.url())));
+    }
+}
+
+#[test]
+fn run_default() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = []
+            a = []
+
+            [[bin]]
+            name = "foo"
+            required-features = ["a"]
+        "#)
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "extern crate foo; fn main() {}")
+        .build();
+
+    assert_that(p.cargo("run"),
+                execs().with_status(101).with_stderr("\
+error: target `foo` requires the features: `a`
+Consider enabling them by passing e.g. `--features=\"a\"`
+"));
+
+    assert_that(p.cargo("run").arg("--features").arg("a"),
+                execs().with_status(0));
+}
+
+#[test]
+fn run_default_multiple_required_features() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a"]
+            a = []
+            b = []
+
+            [[bin]]
+            name = "foo1"
+            path = "src/foo1.rs"
+            required-features = ["a"]
+
+            [[bin]]
+            name = "foo2"
+            path = "src/foo2.rs"
+            required-features = ["b"]
+        "#)
+        .file("src/lib.rs", "")
+        .file("src/foo1.rs", "extern crate foo; fn main() {}")
+        .file("src/foo2.rs", "extern crate foo; fn main() {}")
+        .build();
+
+    assert_that(p.cargo("run"),
+                execs().with_status(101).with_stderr("\
+error: `cargo run` requires that a project only have one executable; \
+use the `--bin` option to specify which one to run"));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/resolve.rs b/collector/compile-benchmarks/cargo/tests/resolve.rs
new file mode 100644
index 000000000..2b84106a6
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/resolve.rs
@@ -0,0 +1,412 @@
+#![deny(warnings)]
+
+extern crate hamcrest;
+extern crate cargo;
+
+use std::collections::BTreeMap;
+
+use hamcrest::{assert_that, equal_to, contains, not};
+
+use cargo::core::source::{SourceId, GitReference};
+use cargo::core::dependency::Kind::{self, Development};
+use cargo::core::{Dependency, PackageId, Summary, Registry};
+use cargo::util::{CargoResult, ToUrl};
+use cargo::core::resolver::{self, Method};
+
+fn resolve(pkg: &PackageId, deps: Vec<Dependency>, registry: &[Summary])
+           -> CargoResult<Vec<PackageId>>
+{
+    struct MyRegistry<'a>(&'a [Summary]);
+    impl<'a> Registry for MyRegistry<'a> {
+        fn query(&mut self,
+                 dep: &Dependency,
+                 f: &mut FnMut(Summary)) -> CargoResult<()> {
+            for summary in self.0.iter() {
+                if dep.matches(summary) {
+                    f(summary.clone());
+                }
+            }
+            Ok(())
+        }
+        fn supports_checksums(&self) -> bool { false }
+        fn requires_precise(&self) -> bool { false }
+    }
+    let mut registry = MyRegistry(registry);
+    let summary = Summary::new(pkg.clone(), deps, BTreeMap::new()).unwrap();
+    let method = Method::Everything;
+    let resolve = resolver::resolve(&[(summary, method)], &[], &mut registry, None)?;
+    let res = resolve.iter().cloned().collect();
+    Ok(res)
+}
+
+trait ToDep {
+    fn to_dep(self) -> Dependency;
+}
+
+impl ToDep for &'static str {
+    fn to_dep(self) -> Dependency {
+        let url = "http://example.com".to_url().unwrap();
+        let source_id = SourceId::for_registry(&url).unwrap();
+        Dependency::parse_no_deprecated(self, Some("1.0.0"), &source_id).unwrap()
+    }
+}
+
+impl ToDep for Dependency {
+    fn to_dep(self) -> Dependency {
+        self
+    }
+}
+
+trait ToPkgId {
+    fn to_pkgid(&self) -> PackageId;
+}
+
+impl ToPkgId for &'static str {
+    fn to_pkgid(&self) -> PackageId {
+        PackageId::new(*self, "1.0.0", &registry_loc()).unwrap()
+    }
+}
+
+impl ToPkgId for (&'static str, &'static str) {
+    fn to_pkgid(&self) -> PackageId {
+        let (name, vers) = *self;
+        PackageId::new(name, vers, &registry_loc()).unwrap()
+    }
+}
+
+macro_rules! pkg {
+    ($pkgid:expr => [$($deps:expr),+]) => ({
+        let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
+
+        Summary::new($pkgid.to_pkgid(), d, BTreeMap::new()).unwrap()
+    });
+
+    ($pkgid:expr) => (
+        Summary::new($pkgid.to_pkgid(), Vec::new(), BTreeMap::new()).unwrap()
+    )
+}
+
+fn registry_loc() -> SourceId {
+    let remote = "http://example.com".to_url().unwrap();
+    SourceId::for_registry(&remote).unwrap()
+}
+
+fn pkg(name: &str) -> Summary {
+    Summary::new(pkg_id(name), Vec::new(), BTreeMap::new()).unwrap()
+}
+
+fn pkg_id(name: &str) -> PackageId {
+    PackageId::new(name, "1.0.0", &registry_loc()).unwrap()
+}
+
+fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
+    let remote = loc.to_url();
+    let master = GitReference::Branch("master".to_string());
+    let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
+
+    PackageId::new(name, "1.0.0", &source_id).unwrap()
+}
+
+fn pkg_loc(name: &str, loc: &str) -> Summary {
+    Summary::new(pkg_id_loc(name, loc), Vec::new(), BTreeMap::new()).unwrap()
+}
+
+fn dep(name: &str) -> Dependency { dep_req(name, "1.0.0") }
+fn dep_req(name: &str, req: &str) -> Dependency {
+    let url = "http://example.com".to_url().unwrap();
+    let source_id = SourceId::for_registry(&url).unwrap();
+    Dependency::parse_no_deprecated(name, Some(req), &source_id).unwrap()
+}
+
+fn dep_loc(name: &str, location: &str) -> Dependency {
+    let url = location.to_url().unwrap();
+    let master = GitReference::Branch("master".to_string());
+    let source_id = SourceId::for_git(&url, master).unwrap();
+    Dependency::parse_no_deprecated(name, Some("1.0.0"), &source_id).unwrap()
+}
+fn dep_kind(name: &str, kind: Kind) -> Dependency {
+    dep(name).set_kind(kind).clone()
+}
+
+fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
+    pkgs
+}
+
+fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {
+    names.iter().map(|name| name.to_pkgid()).collect()
+}
+
+fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
+    names.iter()
+        .map(|&(name, loc)| pkg_id_loc(name, loc)).collect()
+}
+
+#[test]
+fn test_resolving_empty_dependency_list() {
+    let res = resolve(&pkg_id("root"), Vec::new(),
+                      &registry(vec![])).unwrap();
+
+    assert_that(&res, equal_to(&names(&["root"])));
+}
+
+#[test]
+fn test_resolving_only_package() {
+    let reg = registry(vec![pkg("foo")]);
+    let res = resolve(&pkg_id("root"), vec![dep("foo")], &reg);
+
+    assert_that(&res.unwrap(), contains(names(&["root", "foo"])).exactly());
+}
+
+#[test]
+fn test_resolving_one_dep() {
+    let reg = registry(vec![pkg("foo"), pkg("bar")]);
+    let res = resolve(&pkg_id("root"), vec![dep("foo")], &reg);
+
+    assert_that(&res.unwrap(), contains(names(&["root", "foo"])).exactly());
+}
+
+#[test]
+fn test_resolving_multiple_deps() {
+    let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]);
+    let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("baz")],
+                      &reg).unwrap();
+
+    assert_that(&res, contains(names(&["root", "foo", "baz"])).exactly());
+}
+
+#[test]
+fn test_resolving_transitive_deps() {
+    let reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]);
+    let res = resolve(&pkg_id("root"), vec![dep("bar")], &reg).unwrap();
+
+    assert_that(&res, contains(names(&["root", "foo", "bar"])));
+}
+
+#[test]
+fn test_resolving_common_transitive_deps() {
+    let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]);
+    let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("bar")],
+                      &reg).unwrap();
+
+    assert_that(&res, contains(names(&["root", "foo", "bar"])));
+}
+
+#[test]
+fn test_resolving_with_same_name() {
+    let list = vec![pkg_loc("foo", "http://first.example.com"),
+                    pkg_loc("bar", "http://second.example.com")];
+
+    let reg = registry(list);
+    let res = resolve(&pkg_id("root"),
+                      vec![dep_loc("foo", "http://first.example.com"),
+                           dep_loc("bar", "http://second.example.com")],
+                      &reg);
+
+    let mut names = loc_names(&[("foo", "http://first.example.com"),
+                                ("bar", "http://second.example.com")]);
+
+    names.push(pkg_id("root"));
+
+    assert_that(&res.unwrap(), contains(names).exactly());
+}
+
+#[test]
+fn test_resolving_with_dev_deps() {
+    let reg = registry(vec![
+        pkg!("foo" => ["bar", dep_kind("baz", Development)]),
+        pkg!("baz" => ["bat", dep_kind("bam", Development)]),
+        pkg!("bar"),
+        pkg!("bat")
+    ]);
+
+    let res = resolve(&pkg_id("root"),
+                      vec![dep("foo"), dep_kind("baz", Development)],
+                      &reg).unwrap();
+
+    assert_that(&res, contains(names(&["root", "foo", "bar", "baz"])));
+}
+
+#[test]
+fn resolving_with_many_versions() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.2")),
+    ]);
+
+    let res = resolve(&pkg_id("root"), vec![dep("foo")], &reg).unwrap();
+
+    assert_that(&res, contains(names(&[("root", "1.0.0"),
+                                       ("foo", "1.0.2")])));
+}
+
+#[test]
+fn resolving_with_specific_version() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.2")),
+    ]);
+
+    let res = resolve(&pkg_id("root"), vec![dep_req("foo", "=1.0.1")],
+                      &reg).unwrap();
+
+    assert_that(&res, contains(names(&[("root", "1.0.0"),
+                                       ("foo", "1.0.1")])));
+}
+
+#[test]
+fn test_resolving_maximum_version_with_transitive_deps() {
+    let reg = registry(vec![
+        pkg!(("util", "1.2.2")),
+        pkg!(("util", "1.0.0")),
+        pkg!(("util", "1.1.1")),
+        pkg!("foo" => [dep_req("util", "1.0.0")]),
+        pkg!("bar" => [dep_req("util", ">=1.0.1")]),
+    ]);
+
+    let res = resolve(&pkg_id("root"),
+                      vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
+                      &reg).unwrap();
+
+    assert_that(&res, contains(names(&[("root", "1.0.0"),
+                                       ("foo", "1.0.0"),
+                                       ("bar", "1.0.0"),
+                                       ("util", "1.2.2")])));
+    assert_that(&res, not(contains(names(&[("util", "1.0.1")]))));
+    assert_that(&res, not(contains(names(&[("util", "1.1.1")]))));
+}
+
+#[test]
+fn resolving_incompat_versions() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.2")),
+        pkg!("bar" => [dep_req("foo", "=1.0.2")]),
+    ]);
+
+    assert!(resolve(&pkg_id("root"), vec![
+        dep_req("foo", "=1.0.1"),
+        dep("bar"),
+    ], &reg).is_err());
+}
+
+#[test]
+fn resolving_backtrack() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.2") => [dep("bar")]),
+        pkg!(("foo", "1.0.1") => [dep("baz")]),
+        pkg!("bar" => [dep_req("foo", "=2.0.2")]),
+        pkg!("baz"),
+    ]);
+
+    let res = resolve(&pkg_id("root"), vec![
+        dep_req("foo", "^1"),
+    ], &reg).unwrap();
+
+    assert_that(&res, contains(names(&[("root", "1.0.0"),
+                                       ("foo", "1.0.1"),
+                                       ("baz", "1.0.0")])));
+}
+
+#[test]
+fn resolving_allows_multiple_compatible_versions() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.0")),
+        pkg!(("foo", "2.0.0")),
+        pkg!(("foo", "0.1.0")),
+        pkg!(("foo", "0.2.0")),
+
+        pkg!("bar" => ["d1", "d2", "d3", "d4"]),
+        pkg!("d1" => [dep_req("foo", "1")]),
+        pkg!("d2" => [dep_req("foo", "2")]),
+        pkg!("d3" => [dep_req("foo", "0.1")]),
+        pkg!("d4" => [dep_req("foo", "0.2")]),
+    ]);
+
+    let res = resolve(&pkg_id("root"), vec![
+        dep("bar"),
+    ], &reg).unwrap();
+
+    assert_that(&res, contains(names(&[("root", "1.0.0"),
+                                       ("foo", "1.0.0"),
+                                       ("foo", "2.0.0"),
+                                       ("foo", "0.1.0"),
+                                       ("foo", "0.2.0"),
+                                       ("d1", "1.0.0"),
+                                       ("d2", "1.0.0"),
+                                       ("d3", "1.0.0"),
+                                       ("d4", "1.0.0"),
+                                       ("bar", "1.0.0")])));
+}
+
+#[test]
+fn resolving_with_deep_backtracking() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+        pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+
+        pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+                                  dep_req("other", "1")]),
+        pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+
+        pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+        pkg!(("baz", "1.0.1")),
+
+        pkg!(("dep_req", "1.0.0")),
+        pkg!(("dep_req", "2.0.0")),
+    ]);
+
+    let res = resolve(&pkg_id("root"), vec![
+        dep_req("foo", "1"),
+    ], &reg).unwrap();
+
+    assert_that(&res, contains(names(&[("root", "1.0.0"),
+                                       ("foo", "1.0.0"),
+                                       ("bar", "2.0.0"),
+                                       ("baz", "1.0.1")])));
+}
+
+#[test]
+fn resolving_but_no_exists() {
+    let reg = registry(vec![
+    ]);
+
+    let res = resolve(&pkg_id("root"), vec![
+        dep_req("foo", "1"),
+    ], &reg);
+    assert!(res.is_err());
+
+    assert_eq!(res.err().unwrap().to_string(), "\
+no matching package named `foo` found (required by `root`)
+location searched: registry http://example.com/
+version required: ^1\
+");
+}
+
+#[test]
+fn resolving_cycle() {
+    let reg = registry(vec![
+        pkg!("foo" => ["foo"]),
+    ]);
+
+    let _ = resolve(&pkg_id("root"), vec![
+        dep_req("foo", "1"),
+    ], &reg);
+}
+
+#[test]
+fn hard_equality() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.0")),
+
+        pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]),
+    ]);
+
+    let res = resolve(&pkg_id("root"), vec![
+        dep_req("bar", "1"),
+        dep_req("foo", "=1.0.0"),
+    ], &reg).unwrap();
+
+    assert_that(&res, contains(names(&[("root", "1.0.0"),
+                                       ("foo", "1.0.0"),
+                                       ("bar", "1.0.0")])));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/run.rs b/collector/compile-benchmarks/cargo/tests/run.rs
new file mode 100644
index 000000000..449c91caf
--- /dev/null
+++
b/collector/compile-benchmarks/cargo/tests/run.rs @@ -0,0 +1,883 @@ +extern crate cargo; +extern crate cargotest; +extern crate hamcrest; + +use cargo::util::paths::dylib_path_envvar; +use cargotest::support::{project, execs, path2url}; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn simple() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .build(); + + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]`", dir = path2url(p.root()))) + .with_stdout("\ +hello +")); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +#[ignore] +fn simple_implicit_main() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { println!("hello world"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("--bins"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]`", dir = path2url(p.root()))) + .with_stdout("\ +hello +")); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn simple_quiet() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("-q"), + execs().with_status(0).with_stdout("\ +hello +") + ); +} + +#[test] +fn simple_quiet_and_verbose() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("-q").arg("-v"), + execs().with_status(101).with_stderr("\ +[ERROR] cannot set both --verbose and --quiet +")); +} + +#[test] +fn quiet_and_verbose_config() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file(".cargo/config", r#" + [term] + verbose = true + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("-q"), + execs().with_status(0)); +} + +#[test] +fn simple_with_args() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { + assert_eq!(std::env::args().nth(1).unwrap(), "hello"); + assert_eq!(std::env::args().nth(2).unwrap(), "world"); + } + "#) + .build(); + + assert_that(p.cargo("run").arg("hello").arg("world"), + execs().with_status(0)); +} + +#[test] +fn exit_code() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { std::process::exit(2); } + "#) + .build(); + + let mut output = String::from("\ +[COMPILING] foo v0.0.1 (file[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[..]` +"); + if !cfg!(unix) { + output.push_str("\ +[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2) +"); + } + assert_that(p.cargo("run"), + execs().with_status(2).with_stderr(output)); +} + +#[test] +fn exit_code_verbose() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { std::process::exit(2); } + "#) + .build(); + + let mut output = String::from("\ +[COMPILING] foo v0.0.1 (file[..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[..]` +"); + if !cfg!(unix) { + output.push_str("\ +[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2) +"); + } + + assert_that(p.cargo("run").arg("-v"), + execs().with_status(2).with_stderr(output)); +} + +#[test] +fn no_main_file() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("run"), + execs().with_status(101) + .with_stderr("[ERROR] a bin target must be available \ + for `cargo run`\n")); +} + +#[test] +#[ignore] +fn no_main_file_implicit() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("run").arg("--bins"), + execs().with_status(101) + .with_stderr("[ERROR] a bin target must be available \ + for `cargo run`\n")); +} + +#[test] +fn too_many_bins() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "") + .file("src/bin/b.rs", "") + .build(); + + assert_that(p.cargo("run"), + execs().with_status(101) + .with_stderr("[ERROR] `cargo run` requires that a project only \ + have one executable; use the `--bin` option \ + to specify which one to run\n")); +} + +#[test] +#[ignore] +fn too_many_bins_implicit() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "") + .file("src/bin/b.rs", "") + .build(); + + assert_that(p.cargo("run").arg("--bins"), + execs().with_status(101) + .with_stderr("[ERROR] `cargo run` requires that a project only \ + have one executable; use the `--bin` option \ + to specify which one to run\n")); +} + +#[test] +fn specify_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", r#" + #[allow(unused_extern_crates)] + extern crate foo; + fn main() { println!("hello a.rs"); } + "#) + .file("src/bin/b.rs", r#" + #[allow(unused_extern_crates)] + extern crate foo; + fn main() { println!("hello b.rs"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("--bin").arg("a").arg("-v"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] `rustc [..] src[/]lib.rs [..]` +[RUNNING] `rustc [..] src[/]bin[/]a.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]a[EXE]`", dir = path2url(p.root()))) + .with_stdout("\ +hello a.rs +")); + + assert_that(p.cargo("run").arg("--bin").arg("b").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] 
src[/]bin[/]b.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]b[EXE]`") + .with_stdout("\ +hello b.rs +")); +} + +#[test] +fn run_example() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("examples/a.rs", r#" + fn main() { println!("example"); } + "#) + .file("src/bin/a.rs", r#" + fn main() { println!("bin"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("--example").arg("a"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]examples[/]a[EXE]`", dir = path2url(p.root()))) + .with_stdout("\ +example +")); +} + +#[test] +#[ignore] +fn run_bin_implicit() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("examples/a.rs", r#" + fn main() { println!("example"); } + "#) + .file("src/bin/a.rs", r#" + fn main() { println!("bin"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("--bins"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]examples[/]a[EXE]`", dir = path2url(p.root()))) + .with_stdout("\ +bin +")); +} + +#[test] +#[ignore] +fn run_example_implicit() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("examples/a.rs", r#" + fn main() { println!("example"); } + "#) + .file("src/bin/a.rs", r#" + fn main() { println!("bin"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("--examples"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]examples[/]a[EXE]`", dir = path2url(p.root()))) + .with_stdout("\ +example +")); +} + +#[test] +fn run_with_filename() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", r#" + extern crate foo; + fn main() { println!("hello a.rs"); } + "#) + .file("examples/a.rs", r#" + fn main() { println!("example"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("--bin").arg("bin.rs"), + execs().with_status(101).with_stderr("\ +[ERROR] no bin target named `bin.rs`")); + + assert_that(p.cargo("run").arg("--bin").arg("a.rs"), + execs().with_status(101).with_stderr("\ +[ERROR] no bin target named `a.rs` + +Did you mean `a`?")); + + assert_that(p.cargo("run").arg("--example").arg("example.rs"), + execs().with_status(101).with_stderr("\ +[ERROR] no example target named `example.rs`")); + + assert_that(p.cargo("run").arg("--example").arg("a.rs"), + execs().with_status(101).with_stderr("\ +[ERROR] no example target named `a.rs` + +Did you mean `a`?")); +} + +#[test] +fn either_name_or_example() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/a.rs", r#" + fn main() { println!("hello a.rs"); } + "#) + .file("examples/b.rs", r#" + fn main() { println!("hello b.rs"); } + "#) + .build(); + + assert_that(p.cargo("run").arg("--bin").arg("a").arg("--example").arg("b"), + execs().with_status(101) + .with_stderr("[ERROR] `cargo run` can run at most one \ + executable, but multiple were \ + specified")); +} + +#[test] +fn one_bin_multiple_examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/bin/main.rs", r#" + fn main() { println!("hello main.rs"); } + "#) + .file("examples/a.rs", r#" + fn main() { println!("hello a.rs"); } + "#) + .file("examples/b.rs", r#" + fn main() { println!("hello b.rs"); } + "#) + .build(); + + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]main[EXE]`", dir = path2url(p.root()))) + .with_stdout("\ +hello main.rs +")); +} + +#[test] +fn example_with_release_flag() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "*" + path = "bar" + "#) + .file("examples/a.rs", r#" + extern crate bar; + + fn main() { + if cfg!(debug_assertions) { + println!("slow1") + } else { + println!("fast1") + } + bar::baz(); + } + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + "#) + .file("bar/src/bar.rs", r#" + pub fn baz() { + if cfg!(debug_assertions) { + println!("slow2") + } else { + println!("fast2") + } + } + "#) + .build(); + + assert_that(p.cargo("run").arg("-v").arg("--release").arg("--example").arg("a"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] bar v0.0.1 ({url}/bar) +[RUNNING] `rustc --crate-name bar bar[/]src[/]bar.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C metadata=[..] 
\ + --out-dir {dir}[/]target[/]release[/]deps \ + -L dependency={dir}[/]target[/]release[/]deps` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name a examples[/]a.rs --crate-type bin \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]release[/]examples \ + -L dependency={dir}[/]target[/]release[/]deps \ + --extern bar={dir}[/]target[/]release[/]deps[/]libbar-[..].rlib` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `target[/]release[/]examples[/]a[EXE]` +", + dir = p.root().display(), + url = path2url(p.root()), + )) + .with_stdout("\ +fast1 +fast2")); + + assert_that(p.cargo("run").arg("-v").arg("--example").arg("a"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] bar v0.0.1 ({url}/bar) +[RUNNING] `rustc --crate-name bar bar[/]src[/]bar.rs --crate-type lib \ + --emit=dep-info,link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]debug[/]deps \ + -L dependency={dir}[/]target[/]debug[/]deps` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name a examples[/]a.rs --crate-type bin \ + --emit=dep-info,link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]debug[/]examples \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern bar={dir}[/]target[/]debug[/]deps[/]libbar-[..].rlib` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]examples[/]a[EXE]` +", + dir = p.root().display(), + url = path2url(p.root()), + )) + .with_stdout("\ +slow1 +slow2")); +} + +#[test] +fn run_dylib_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { bar::bar(); } + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate-type = ["dylib"] + "#) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that(p.cargo("run").arg("hello").arg("world"), + execs().with_status(0)); +} + +#[test] +fn release_works() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { if cfg!(debug_assertions) { panic!() } } + "#) + .build(); + + assert_that(p.cargo("run").arg("--release"), + execs().with_status(0).with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] 
+[RUNNING] `target[/]release[/]foo[EXE]`
+",
+    dir = path2url(p.root()),
+    )));
+    assert_that(&p.release_bin("foo"), existing_file());
+}
+
+#[test]
+fn run_bin_different_name() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#)
+        .file("src/bar.rs", r#"
+            fn main() { }
+        "#)
+        .build();
+
+    assert_that(p.cargo("run"), execs().with_status(0));
+}
+
+#[test]
+fn dashes_are_forwarded() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#)
+        .file("src/main.rs", r#"
+            fn main() {
+                let s: Vec<String> = std::env::args().collect();
+                assert_eq!(s[1], "a");
+                assert_eq!(s[2], "--");
+                assert_eq!(s[3], "b");
+            }
+        "#)
+        .build();
+
+    assert_that(p.cargo("run").arg("--").arg("a").arg("--").arg("b"),
+                execs().with_status(0));
+}
+
+#[test]
+fn run_from_executable_folder() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("src/main.rs", r#"
+            fn main() { println!("hello"); }
+        "#)
+        .build();
+
+    let cwd = p.root().join("target").join("debug");
+    p.cargo("build").exec_with_output().unwrap();
+
+    assert_that(p.cargo("run").cwd(cwd),
+                execs().with_status(0)
+                       .with_stderr("\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\
+[RUNNING] `.[/]foo[EXE]`")
+                       .with_stdout("\
+hello
+"));
+}
+
+#[test]
+fn run_with_library_paths() {
+    let p = project("foo");
+
+    // Only link search directories within the target output directory are
+    // propagated through to dylib_path_envvar() (see #3366).
+    let mut dir1 = p.target_debug_dir();
+    dir1.push("foo\\backslash");
+
+    let mut dir2 = p.target_debug_dir();
+    dir2.push("dir=containing=equal=signs");
+
+    let p = p
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#)
+        .file("build.rs", &format!(r##"
+            fn main() {{
+                println!(r#"cargo:rustc-link-search=native={}"#);
+                println!(r#"cargo:rustc-link-search={}"#);
+            }}
+        "##, dir1.display(), dir2.display()))
+        .file("src/main.rs", &format!(r##"
+            fn main() {{
+                let search_path = std::env::var_os("{}").unwrap();
+                let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+                assert!(paths.contains(&r#"{}"#.into()));
+                assert!(paths.contains(&r#"{}"#.into()));
+            }}
+        "##, dylib_path_envvar(), dir1.display(), dir2.display()))
+        .build();
+
+    assert_that(p.cargo("run"), execs().with_status(0));
+}
+
+#[test]
+fn fail_no_extra_verbose() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("src/main.rs", r#"
+            fn main() {
+                std::process::exit(1);
+            }
+        "#)
+        .build();
+
+    assert_that(p.cargo("run").arg("-q"),
+                execs().with_status(1)
+                       .with_stdout("")
+                       .with_stderr(""));
+}
+
+#[test]
+fn run_multiple_packages() {
+    let p = project("foo")
+        .file("foo/Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [workspace]
+
+            [dependencies]
+            d1 = { path = "d1" }
+            d2 = { path = "d2" }
+            d3 = { path = "../d3" } # outside of the workspace
+
+            [[bin]]
+            name = "foo"
+        "#)
+        .file("foo/src/foo.rs", "fn main() { println!(\"foo\"); }")
+        .file("foo/d1/Cargo.toml", r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "d1"
+        "#)
+        .file("foo/d1/src/lib.rs", "")
+        .file("foo/d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .file("foo/d2/Cargo.toml", r#"
+            [package]
+ name = "d2" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d2" + "#) + .file("foo/d2/src/main.rs", "fn main() { println!(\"d2\"); }") + .file("d3/Cargo.toml", r#" + [package] + name = "d3" + version = "0.0.1" + authors = [] + "#) + .file("d3/src/main.rs", "fn main() { println!(\"d2\"); }") + .build(); + + let cargo = || { + let mut process_builder = p.cargo("run"); + process_builder.cwd(p.root().join("foo")); + process_builder + }; + + assert_that(cargo().arg("-p").arg("d1"), + execs().with_status(0).with_stdout("d1")); + + assert_that(cargo().arg("-p").arg("d2").arg("--bin").arg("d2"), + execs().with_status(0).with_stdout("d2")); + + assert_that(cargo(), + execs().with_status(0).with_stdout("foo")); + + assert_that(cargo().arg("-p").arg("d1").arg("-p").arg("d2"), + execs() + .with_status(1) + .with_stderr_contains("[ERROR] Invalid arguments.")); + + assert_that(cargo().arg("-p").arg("d3"), + execs() + .with_status(101) + .with_stderr_contains("[ERROR] package `d3` is not a member of the workspace")); +} diff --git a/collector/compile-benchmarks/cargo/tests/rustc.rs b/collector/compile-benchmarks/cargo/tests/rustc.rs new file mode 100644 index 000000000..da6aced75 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/rustc.rs @@ -0,0 +1,397 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +const CARGO_RUSTC_ERROR: &'static str = +"[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering +the package by passing e.g. `--lib` or `--bin NAME` to specify a single target"; + +#[test] +fn build_lib_for_foo() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustc").arg("--lib").arg("-v"), + execs() + .with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.root().display(), url = p.url()))); +} + +#[test] +fn lib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustc").arg("--lib").arg("-v") + .arg("--").arg("-C").arg("debug-assertions=off"), + execs() + .with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C debug-assertions=off \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", dir = p.root().display(), url = p.url()))) +} + +#[test] +fn build_main_and_allow_unstable_options() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustc").arg("-v").arg("--bin").arg("foo") + .arg("--").arg("-C").arg("debug-assertions"), + execs() + .with_status(0) + .with_stderr(&format!("\ +[COMPILING] {name} v{version} ({url}) +[RUNNING] `rustc --crate-name {name} src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[RUNNING] `rustc --crate-name {name} src[/]main.rs --crate-type bin \ + --emit=dep-info,link -C debuginfo=2 \ + -C debug-assertions \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern {name}={dir}[/]target[/]debug[/]deps[/]lib{name}-[..].rlib` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), url = p.url(), + name = "foo", version = "0.0.1"))); +} + +#[test] +fn fails_when_trying_to_build_main_and_lib_with_args() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustc").arg("-v") + .arg("--").arg("-C").arg("debug-assertions"), + execs() + .with_status(101) + .with_stderr(CARGO_RUSTC_ERROR)); +} + +#[test] +fn build_with_args_to_one_of_multiple_binaries() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/foo.rs", r#" + fn main() {} + "#) + .file("src/bin/bar.rs", r#" + fn main() {} + "#) + .file("src/bin/baz.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustc").arg("-v").arg("--bin").arg("bar") + .arg("--").arg("-C").arg("debug-assertions"), + execs() + .with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib --emit=dep-info,link \ + -C debuginfo=2 -C metadata=[..] \ + --out-dir [..]` +[RUNNING] `rustc --crate-name bar src[/]bin[/]bar.rs --crate-type bin --emit=dep-info,link \ + -C debuginfo=2 -C debug-assertions [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", url = p.url()))); +} + +#[test] +fn fails_with_args_to_all_binaries() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/foo.rs", r#" + fn main() {} + "#) + .file("src/bin/bar.rs", r#" + fn main() {} + "#) + .file("src/bin/baz.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustc").arg("-v") + .arg("--").arg("-C").arg("debug-assertions"), + execs() + .with_status(101) + .with_stderr(CARGO_RUSTC_ERROR)); +} + +#[test] +fn build_with_args_to_one_of_multiple_tests() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("tests/foo.rs", r#" "#) + .file("tests/bar.rs", r#" "#) + .file("tests/baz.rs", r#" "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustc").arg("-v").arg("--test").arg("bar") + .arg("--").arg("-C").arg("debug-assertions"), + execs() + .with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib --emit=dep-info,link \ + -C debuginfo=2 -C metadata=[..] \ + --out-dir [..]` +[RUNNING] `rustc --crate-name bar tests[/]bar.rs --emit=dep-info,link -C debuginfo=2 \ + -C debug-assertions [..]--test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", url = p.url()))); +} + +#[test] +fn build_foo_with_bar_dependency() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + bar::baz() + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("rustc").arg("-v").arg("--").arg("-C").arg("debug-assertions"), + execs() + .with_status(0) + .with_stderr(format!("\ +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `[..] -C debuginfo=2 [..]` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `[..] -C debuginfo=2 -C debug-assertions [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", url = foo.url()))); +} + +#[test] +fn build_only_bar_dependency() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + bar::baz() + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("rustc").arg("-v").arg("-p").arg("bar") + .arg("--").arg("-C").arg("debug-assertions"), + execs() + .with_status(0) + .with_stderr("\ +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `rustc --crate-name bar [..] --crate-type lib [..] -C debug-assertions [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn fail_with_multiple_packages() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .build(); + + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { + if cfg!(flag = "1") { println!("Yeah from bar!"); } + } + "#) + .build(); + + let _baz = project("baz") + .file("Cargo.toml", r#" + [package] + name = "baz" + version = "0.1.0" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() { + if cfg!(flag = "1") { println!("Yeah from baz!"); } + } + "#) + .build(); + + assert_that(foo.cargo("rustc").arg("-v").arg("-p").arg("bar") + .arg("-p").arg("baz"), + execs().with_status(1).with_stderr("\ +[ERROR] Invalid arguments. + +Usage: + cargo rustc [options] [--] [...]")); +} + +#[test] +fn rustc_with_other_profile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + a = { path = "a" } + "#) + .file("src/main.rs", r#" + #[cfg(test)] extern crate a; + + #[test] + fn foo() {} + "#) + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("rustc").arg("--profile").arg("test"), + execs().with_status(0)); +} diff --git a/collector/compile-benchmarks/cargo/tests/rustdoc.rs b/collector/compile-benchmarks/cargo/tests/rustdoc.rs new file mode 100644 index 000000000..371937ccb --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/rustdoc.rs @@ -0,0 +1,170 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::{execs, project}; +use hamcrest::{assert_that}; + +#[test] +fn rustdoc_simple() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustdoc").arg("-v"), + execs() + .with_status(0) + .with_stderr(format!("\ +[DOCUMENTING] foo v0.0.1 ({url}) +[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.root().display(), url = p.url()))); +} + +#[test] +fn rustdoc_args() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"), + execs() + .with_status(0) + .with_stderr(format!("\ +[DOCUMENTING] foo v0.0.1 ({url}) +[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + --cfg=foo \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", dir = p.root().display(), url = p.url()))); +} + + + +#[test] +fn rustdoc_foo_with_bar_dependency() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", r#" + extern crate bar; + pub fn foo() {} + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"), + execs() + .with_status(0) + .with_stderr(format!("\ +[COMPILING] bar v0.0.1 ([..]) +[RUNNING] `rustc [..]bar[/]src[/]lib.rs [..]` +[DOCUMENTING] foo v0.0.1 ({url}) +[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + --cfg=foo \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = foo.root().display(), url = foo.url()))); +} + +#[test] +fn rustdoc_only_bar_dependency() { + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#) + .file("src/main.rs", r#" + extern crate bar; + fn main() { + bar::baz() + } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn baz() {} + "#) + .build(); + + assert_that(foo.cargo("rustdoc").arg("-v").arg("-p").arg("bar") + .arg("--").arg("--cfg=foo"), + execs() + .with_status(0) + .with_stderr(format!("\ +[DOCUMENTING] bar v0.0.1 ([..]) +[RUNNING] `rustdoc --crate-name bar [..]bar[/]src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + --cfg=foo \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = foo.root().display()))); +} + + +#[test] +fn rustdoc_same_name_err() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that(p.cargo("rustdoc").arg("-v") + .arg("--").arg("--cfg=foo"), + execs() + .with_status(101) + .with_stderr("[ERROR] The target `foo` is specified as a \ +library and as a binary by package `foo [..]`. It can be documented[..]")); +} diff --git a/collector/compile-benchmarks/cargo/tests/rustdocflags.rs b/collector/compile-benchmarks/cargo/tests/rustdocflags.rs new file mode 100644 index 000000000..e1385d3e6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/rustdocflags.rs @@ -0,0 +1,88 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +#[test] +fn parses_env() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").arg("-v"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] `rustdoc [..] 
--cfg=foo[..]` +")); +} + +#[test] +fn parses_config() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [build] + rustdocflags = ["--cfg", "foo"] + "#) + .build(); + + assert_that(p.cargo("doc").arg("-v"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] `rustdoc [..] --cfg foo[..]` +")); +} + +#[test] +fn bad_flags() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--bogus"), + execs().with_status(101)); +} + +#[test] +fn rerun() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo"), + execs().with_status(0)); + assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo"), + execs().with_status(0).with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=bar"), + execs().with_status(0).with_stderr("\ +[DOCUMENTING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} diff --git a/collector/compile-benchmarks/cargo/tests/rustflags.rs b/collector/compile-benchmarks/cargo/tests/rustflags.rs new file mode 100644 index 000000000..144dad5c2 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/rustflags.rs @@ -0,0 +1,1161 @@ +extern crate cargotest; +extern crate hamcrest; + +use std::io::Write; +use std::fs::{self, File}; + +use cargotest::rustc_host; +use cargotest::support::{project, project_in_home, execs, paths}; +use hamcrest::assert_that; + +#[test] +fn env_rustflags_normal_source() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file("benches/d.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#) + .build(); + + // Use RUSTFLAGS to pass an argument that will generate an error + assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus") + .arg("--lib"), + execs().with_status(101)); + assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus") + .arg("--bin=a"), + execs().with_status(101)); + assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus") + .arg("--example=b"), + execs().with_status(101)); + assert_that(p.cargo("test").env("RUSTFLAGS", "-Z bogus"), + execs().with_status(101)); + assert_that(p.cargo("bench").env("RUSTFLAGS", "-Z bogus"), + execs().with_status(101)); +} + +#[test] +fn env_rustflags_build_script() { + // RUSTFLAGS should be passed to rustc for build scripts + // when --target is not specified. + // In this test if --cfg foo is passed the build will fail. + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { } + #[cfg(not(foo))] + fn main() { } + "#) + .build(); + + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0)); +} + +#[test] +fn env_rustflags_build_script_dep() { + // RUSTFLAGS should be passed to rustc for build scripts + // when --target is not specified. 
+ // In this test if --cfg foo is not passed the build will fail. + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + "#) + .file("src/lib.rs", r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#) + .build(); + + assert_that(foo.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0)); +} + +#[test] +fn env_rustflags_plugin() { + // RUSTFLAGS should be passed to rustc for plugins + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#) + .file("src/lib.rs", r#" + fn main() { } + #[cfg(not(foo))] + fn main() { } + "#) + .build(); + + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0)); +} + +#[test] +fn env_rustflags_plugin_dep() { + // RUSTFLAGS should be passed to rustc for plugins + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", r#" + fn foo() { } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + + [lib] + name = "bar" + "#) + .file("src/lib.rs", r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#) + .build(); + + assert_that(foo.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0)); +} + +#[test] +fn env_rustflags_normal_source_with_target() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file("benches/d.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#) + .build(); + + let host = &rustc_host(); + + // Use RUSTFLAGS to pass an argument that will generate an error + assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus") + .arg("--lib").arg("--target").arg(host), + execs().with_status(101)); + assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus") + .arg("--bin=a").arg("--target").arg(host), + execs().with_status(101)); + assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus") + .arg("--example=b").arg("--target").arg(host), + execs().with_status(101)); + assert_that(p.cargo("test").env("RUSTFLAGS", "-Z bogus") + .arg("--target").arg(host), + execs().with_status(101)); + assert_that(p.cargo("bench").env("RUSTFLAGS", "-Z bogus") + .arg("--target").arg(host), + execs().with_status(101)); +} + +#[test] +fn env_rustflags_build_script_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. 
+ let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#) + .build(); + + let host = rustc_host(); + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo") + .arg("--target").arg(host), + execs().with_status(0)); +} + +#[test] +fn env_rustflags_build_script_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + "#) + .file("src/lib.rs", r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#) + .build(); + + let host = rustc_host(); + assert_that(foo.cargo("build").env("RUSTFLAGS", "--cfg foo") + .arg("--target").arg(host), + execs().with_status(0)); +} + +#[test] +fn env_rustflags_plugin_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#) + .file("src/lib.rs", r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#) + .build(); + + let host = rustc_host(); + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo") + .arg("--target").arg(host), + execs().with_status(0)); +} + +#[test] +fn env_rustflags_plugin_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. 
+ let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", r#" + fn foo() { } + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + + [lib] + name = "bar" + "#) + .file("src/lib.rs", r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#) + .build(); + + let host = rustc_host(); + assert_that(foo.cargo("build").env("RUSTFLAGS", "--cfg foo") + .arg("--target").arg(host), + execs().with_status(0)); +} + +#[test] +fn env_rustflags_recompile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + // Setting RUSTFLAGS forces a recompile + assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus"), + execs().with_status(101)); +} + +#[test] +fn env_rustflags_recompile2() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0)); + // Setting RUSTFLAGS forces a recompile + assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus"), + execs().with_status(101)); +} + +#[test] +fn env_rustflags_no_recompile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0)); + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_stdout("").with_status(0)); +} + +#[test] +fn build_rustflags_normal_source() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file("benches/d.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#) + .file(".cargo/config", r#" + [build] + rustflags = ["-Z", "bogus"] + "#) + .build(); + + assert_that(p.cargo("build") + .arg("--lib"), + execs().with_status(101)); + assert_that(p.cargo("build") + .arg("--bin=a"), + execs().with_status(101)); + assert_that(p.cargo("build") + .arg("--example=b"), + execs().with_status(101)); + assert_that(p.cargo("test"), + execs().with_status(101)); + assert_that(p.cargo("bench"), + execs().with_status(101)); +} + +#[test] +fn build_rustflags_build_script() { + // RUSTFLAGS should be passed to rustc for build scripts + // when --target is not specified. + // In this test if --cfg foo is passed the build will fail. + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { } + #[cfg(not(foo))] + fn main() { } + "#) + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn build_rustflags_build_script_dep() { + // RUSTFLAGS should be passed to rustc for build scripts + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. 
+ let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { } + "#) + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + "#) + .file("src/lib.rs", r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#) + .build(); + + assert_that(foo.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn build_rustflags_plugin() { + // RUSTFLAGS should be passed to rustc for plugins + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#) + .file("src/lib.rs", r#" + fn main() { } + #[cfg(not(foo))] + fn main() { } + "#) + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn build_rustflags_plugin_dep() { + // RUSTFLAGS should be passed to rustc for plugins + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", r#" + fn foo() { } + "#) + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + + [lib] + name = "bar" + "#) + .file("src/lib.rs", r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#) + .build(); + + assert_that(foo.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn build_rustflags_normal_source_with_target() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file("benches/d.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#) + .file(".cargo/config", r#" + [build] + rustflags = ["-Z", "bogus"] + "#) + .build(); + + let ref host = rustc_host(); + + // Use RUSTFLAGS to pass an argument that will generate an error + assert_that(p.cargo("build") + .arg("--lib").arg("--target").arg(host), + execs().with_status(101)); + assert_that(p.cargo("build") + .arg("--bin=a").arg("--target").arg(host), + execs().with_status(101)); + assert_that(p.cargo("build") + .arg("--example=b").arg("--target").arg(host), + execs().with_status(101)); + assert_that(p.cargo("test") + .arg("--target").arg(host), + execs().with_status(101)); + assert_that(p.cargo("bench") + .arg("--target").arg(host), + execs().with_status(101)); +} + +#[test] +fn build_rustflags_build_script_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. 
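+ // For illustration: `build.rustflags` in `.cargo/config` is the file-based
+ // counterpart of the RUSTFLAGS environment variable and follows the same
+ // rule exercised here: with `--target`, host artifacts such as this build
+ // script do not receive the flags. Minimal config sketch:
+ //
+ //     [build]
+ //     rustflags = ["--cfg", "foo"]
+ //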
+ let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#) + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + + let host = rustc_host(); + assert_that(p.cargo("build") + .arg("--target").arg(host), + execs().with_status(0)); +} + +#[test] +fn build_rustflags_build_script_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", "") + .file("build.rs", r#" + fn main() { } + "#) + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + "#) + .file("src/lib.rs", r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#) + .build(); + + let host = rustc_host(); + assert_that(foo.cargo("build") + .arg("--target").arg(host), + execs().with_status(0)); +} + +#[test] +fn build_rustflags_plugin_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#) + .file("src/lib.rs", r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#) + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + + let host = rustc_host(); + assert_that(p.cargo("build") + .arg("--target").arg(host), + execs().with_status(0)); +} + +#[test] +fn build_rustflags_plugin_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. 
+ let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#) + .file("src/lib.rs", r#" + fn foo() { } + "#) + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + let _bar = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + + [lib] + name = "bar" + "#) + .file("src/lib.rs", r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#) + .build(); + + let host = rustc_host(); + assert_that(foo.cargo("build") + .arg("--target").arg(host), + execs().with_status(0)); +} + +#[test] +fn build_rustflags_recompile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + // Setting RUSTFLAGS forces a recompile + let config = r#" + [build] + rustflags = ["-Z", "bogus"] + "#; + let config_file = paths::root().join("foo/.cargo/config"); + fs::create_dir_all(config_file.parent().unwrap()).unwrap(); + let mut config_file = File::create(config_file).unwrap(); + config_file.write_all(config.as_bytes()).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(101)); +} + +#[test] +fn build_rustflags_recompile2() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0)); + + // Setting RUSTFLAGS forces a recompile + let config = r#" + [build] + rustflags = ["-Z", "bogus"] + "#; + let config_file = paths::root().join("foo/.cargo/config"); + fs::create_dir_all(config_file.parent().unwrap()).unwrap(); + let mut config_file = File::create(config_file).unwrap(); + config_file.write_all(config.as_bytes()).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(101)); +} + +#[test] +fn build_rustflags_no_recompile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0)); + assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_stdout("").with_status(0)); +} + +#[test] +fn build_rustflags_with_home_config() { + // We need a config file inside the home directory + let home = paths::home(); + let home_config = home.join(".cargo"); + fs::create_dir(&home_config).unwrap(); + File::create(&home_config.join("config")).unwrap().write_all(br#" + [build] + rustflags = ["-Cllvm-args=-x86-asm-syntax=intel"] + "#).unwrap(); + + // And we need the project to be inside the home directory + // so the walking process finds the home project twice. 
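+ // For illustration: Cargo merges config by walking from the project
+ // directory up to the filesystem root and then adding the home config, so
+ // with the project under $HOME both probes can hit the same file (paths
+ // below are examples only):
+ //
+ //     $HOME/foo/.cargo/config   # project level (not created here)
+ //     $HOME/.cargo/config       # hit twice: on the upward walk and as home config
+ //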
+ let p = project_in_home("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn target_rustflags_normal_source() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file("benches/d.rs", r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#) + .file(".cargo/config", &format!(" + [target.{}] + rustflags = [\"-Z\", \"bogus\"] + ", rustc_host())) + .build(); + + assert_that(p.cargo("build") + .arg("--lib"), + execs().with_status(101)); + assert_that(p.cargo("build") + .arg("--bin=a"), + execs().with_status(101)); + assert_that(p.cargo("build") + .arg("--example=b"), + execs().with_status(101)); + assert_that(p.cargo("test"), + execs().with_status(101)); + assert_that(p.cargo("bench"), + execs().with_status(101)); +} + +// target.{}.rustflags takes precedence over build.rustflags +#[test] +fn target_rustflags_precedence() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", &format!(" + [build] + rustflags = [\"--cfg\", \"foo\"] + + [target.{}] + rustflags = [\"-Z\", \"bogus\"] + ", rustc_host())) + .build(); + + assert_that(p.cargo("build") + .arg("--lib"), + execs().with_status(101)); + assert_that(p.cargo("build") + .arg("--bin=a"), + execs().with_status(101)); + assert_that(p.cargo("build") + .arg("--example=b"), + execs().with_status(101)); + assert_that(p.cargo("test"), + execs().with_status(101)); + assert_that(p.cargo("bench"), + execs().with_status(101)); +} + +#[test] +fn cfg_rustflags_normal_source() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "pub fn t() {}") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file(".cargo/config", &format!(r#" + [target.'cfg({})'] + rustflags = ["--cfg", "bar"] + "#, if rustc_host().contains("-windows-") {"windows"} else {"not(windows)"})) + .build(); + + assert_that(p.cargo("build").arg("--lib").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build").arg("--bin=a").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build").arg("--example=b").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("test").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); + + assert_that(p.cargo("bench").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] release [optimized] target(s) in [..] +")); + +} + +// target.'cfg(...)'.rustflags takes precedence over build.rustflags +#[test] +fn cfg_rustflags_precedence() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "pub fn t() {}") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file(".cargo/config", &format!(r#" + [build] + rustflags = ["--cfg", "foo"] + + [target.'cfg({})'] + rustflags = ["--cfg", "bar"] + "#, if rustc_host().contains("-windows-") { "windows" } else { "not(windows)" })) + .build(); + + assert_that(p.cargo("build").arg("--lib").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build").arg("--bin=a").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build").arg("--example=b").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("test").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("bench").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] release [optimized] target(s) in [..] +")); + +} + +#[test] +fn target_rustflags_string_and_array_form1() { + let p1 = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [build] + rustflags = ["--cfg", "foo"] + "#) + .build(); + + assert_that(p1.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + let p2 = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [build] + rustflags = "--cfg foo" + "#) + .build(); + + assert_that(p2.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); + +} + +#[test] +fn target_rustflags_string_and_array_form2() { + let p1 = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file(".cargo/config", &format!(r#" + [target.{}] + rustflags = ["--cfg", "foo"] + "#, rustc_host())) + .file("src/lib.rs", "") + .build(); + + assert_that(p1.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + let p2 = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file(".cargo/config", &format!(r#" + [target.{}] + rustflags = "--cfg foo" + "#, rustc_host())) + .file("src/lib.rs", "") + .build(); + + assert_that(p2.cargo("build").arg("-v"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + +} diff --git a/collector/compile-benchmarks/cargo/tests/search.rs b/collector/compile-benchmarks/cargo/tests/search.rs new file mode 100644 index 000000000..9410ce7e7 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/search.rs @@ -0,0 +1,280 @@ +extern crate cargo; +extern crate cargotest; +extern crate hamcrest; +extern crate url; + +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::PathBuf; + +use cargo::util::ProcessBuilder; +use cargotest::support::execs; +use cargotest::support::git::repo; +use cargotest::support::paths; +use hamcrest::assert_that; +use url::Url; + +fn registry_path() -> PathBuf { paths::root().join("registry") } +fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } +fn api_path() -> PathBuf { paths::root().join("api") } +fn api() -> Url { Url::from_file_path(&*api_path()).ok().unwrap() } + +fn setup() { + let config = paths::root().join(".cargo/config"); + fs::create_dir_all(config.parent().unwrap()).unwrap(); + fs::create_dir_all(&api_path().join("api/v1")).unwrap(); + + let _ = repo(®istry_path()) + .file("config.json", &format!(r#"{{ + "dl": "{0}", + "api": "{0}" + }}"#, api())) + .build(); +} + +fn cargo_process(s: &str) -> ProcessBuilder { + let mut b = cargotest::cargo_process(); + b.arg(s); + b +} + +#[test] +fn simple() { + setup(); + + let contents = r#"{ + "crates": [{ + "created_at": "2014-11-16T20:17:35Z", + "description": "Design by contract style assertions for Rust", + "documentation": null, + "downloads": 2, + "homepage": null, + "id": "hoare", + "keywords": [], + "license": null, + "links": { + "owners": "/api/v1/crates/hoare/owners", + "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", + "version_downloads": "/api/v1/crates/hoare/downloads", + "versions": "/api/v1/crates/hoare/versions" + }, + "max_version": "0.1.1", + "name": "hoare", + "repository": "https://github.com/nick29581/libhoare", + "updated_at": "2014-11-20T21:49:21Z", + "versions": null + }], + "meta": { + "total": 1 + } + }"#; + let base = api_path().join("api/v1/crates"); + + // Older versions of curl don't peel off query parameters when looking for + // filenames, so just make both files. + // + // On windows, though, `?` is an invalid character, but we always build curl + // from source there anyway! 
+ File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
+ if !cfg!(windows) {
+ File::create(&base.with_file_name("crates?q=postgres&per_page=10")).unwrap()
+ .write_all(contents.as_bytes()).unwrap();
+ }
+
+ assert_that(cargo_process("search").arg("postgres")
+ .arg("--index").arg(registry().to_string()),
+ execs().with_status(0)
+ .with_stdout_contains("\
+hoare = \"0.1.1\" # Design by contract style assertions for Rust"));
+}
+
+// TODO: Deprecated
+// remove once it has been decided '--host' can be safely removed
+#[test]
+fn simple_with_host() {
+ setup();
+
+ let contents = r#"{
+ "crates": [{
+ "created_at": "2014-11-16T20:17:35Z",
+ "description": "Design by contract style assertions for Rust",
+ "documentation": null,
+ "downloads": 2,
+ "homepage": null,
+ "id": "hoare",
+ "keywords": [],
+ "license": null,
+ "links": {
+ "owners": "/api/v1/crates/hoare/owners",
+ "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies",
+ "version_downloads": "/api/v1/crates/hoare/downloads",
+ "versions": "/api/v1/crates/hoare/versions"
+ },
+ "max_version": "0.1.1",
+ "name": "hoare",
+ "repository": "https://github.com/nick29581/libhoare",
+ "updated_at": "2014-11-20T21:49:21Z",
+ "versions": null
+ }],
+ "meta": {
+ "total": 1
+ }
+ }"#;
+ let base = api_path().join("api/v1/crates");
+
+ // Older versions of curl don't peel off query parameters when looking for
+ // filenames, so just make both files.
+ //
+ // On windows, though, `?` is an invalid character, but we always build curl
+ // from source there anyway!
+ File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
+ if !cfg!(windows) {
+ File::create(&base.with_file_name("crates?q=postgres&per_page=10")).unwrap()
+ .write_all(contents.as_bytes()).unwrap();
+ }
+
+ assert_that(cargo_process("search").arg("postgres")
+ .arg("--host").arg(registry().to_string()),
+ execs().with_status(0)
+ .with_stderr(&format!("\
+[WARNING] The flag '--host' is no longer valid.
+
+Previous versions of Cargo accepted this flag, but it is being
+depricated. The flag is being renamed to 'index', as the flag
+wants the location of the index in which to search. Please
+use '--index' instead.
+
+This will soon become a hard error, so it's either recommended
+to update to a fixed version or contact the upstream maintainer
+about this warning.
+[UPDATING] registry `{reg}`
+",
+ reg = registry()))
+ .with_stdout_contains("\
+hoare = \"0.1.1\" # Design by contract style assertions for Rust"));
+}
+
+// TODO: Deprecated
+// remove once it has been decided '--host' can be safely removed
+#[test]
+fn simple_with_index_and_host() {
+ setup();
+
+ let contents = r#"{
+ "crates": [{
+ "created_at": "2014-11-16T20:17:35Z",
+ "description": "Design by contract style assertions for Rust",
+ "documentation": null,
+ "downloads": 2,
+ "homepage": null,
+ "id": "hoare",
+ "keywords": [],
+ "license": null,
+ "links": {
+ "owners": "/api/v1/crates/hoare/owners",
+ "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies",
+ "version_downloads": "/api/v1/crates/hoare/downloads",
+ "versions": "/api/v1/crates/hoare/versions"
+ },
+ "max_version": "0.1.1",
+ "name": "hoare",
+ "repository": "https://github.com/nick29581/libhoare",
+ "updated_at": "2014-11-20T21:49:21Z",
+ "versions": null
+ }],
+ "meta": {
+ "total": 1
+ }
+ }"#;
+ let base = api_path().join("api/v1/crates");
+
+ // Older versions of curl don't peel off query parameters when looking for
+ // filenames, so just make both files.
+ // + // On windows, though, `?` is an invalid character, but we always build curl + // from source there anyway! + File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap(); + if !cfg!(windows) { + File::create(&base.with_file_name("crates?q=postgres&per_page=10")).unwrap() + .write_all(contents.as_bytes()).unwrap(); + } + + assert_that(cargo_process("search").arg("postgres") + .arg("--index").arg(registry().to_string()) + .arg("--host").arg(registry().to_string()), + execs().with_status(0) + .with_stderr(&format!("\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +depricated. The flag is being renamed to 'index', as the flag +wants the location of the index in which to search. Please +use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] registry `{reg}` +", + reg = registry())) + .with_stdout_contains("\ +hoare = \"0.1.1\" # Design by contract style assertions for Rust")); +} + +#[test] +fn multiple_query_params() { + setup(); + + let contents = r#"{ + "crates": [{ + "created_at": "2014-11-16T20:17:35Z", + "description": "Design by contract style assertions for Rust", + "documentation": null, + "downloads": 2, + "homepage": null, + "id": "hoare", + "keywords": [], + "license": null, + "links": { + "owners": "/api/v1/crates/hoare/owners", + "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", + "version_downloads": "/api/v1/crates/hoare/downloads", + "versions": "/api/v1/crates/hoare/versions" + }, + "max_version": "0.1.1", + "name": "hoare", + "repository": "https://github.com/nick29581/libhoare", + "updated_at": "2014-11-20T21:49:21Z", + "versions": null + }], + "meta": { + "total": 1 + } + }"#; + let base = api_path().join("api/v1/crates"); + + // Older versions of curl don't peel off query parameters when looking for + // filenames, so just make both files. + // + // On windows, though, `?` is an invalid character, but we always build curl + // from source there anyway! 
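+ // For illustration: with several positional terms, the search terms are
+ // joined with `+` (the query-string encoding of a space) into a single `q`
+ // parameter, which is why the stub response file below is named for the
+ // full encoded request:
+ //
+ //     GET {api}/api/v1/crates?q=postgres+sql&per_page=10
+ //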
+ File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
+ if !cfg!(windows) {
+ File::create(&base.with_file_name("crates?q=postgres+sql&per_page=10")).unwrap()
+ .write_all(contents.as_bytes()).unwrap();
+ }
+
+ assert_that(cargo_process("search").arg("postgres").arg("sql")
+ .arg("--index").arg(registry().to_string()),
+ execs().with_status(0)
+ .with_stdout_contains("\
+hoare = \"0.1.1\" # Design by contract style assertions for Rust"));
+}
+
+#[test]
+fn help() {
+ assert_that(cargo_process("search").arg("-h"),
+ execs().with_status(0));
+ assert_that(cargo_process("help").arg("search"),
+ execs().with_status(0));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/small-fd-limits.rs b/collector/compile-benchmarks/cargo/tests/small-fd-limits.rs
new file mode 100644
index 000000000..e997f76fd
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/small-fd-limits.rs
@@ -0,0 +1,109 @@
+extern crate cargotest;
+extern crate git2;
+extern crate hamcrest;
+extern crate url;
+
+use std::env;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+use std::process::Command;
+
+use cargotest::support::{execs, project};
+use cargotest::support::registry::Package;
+use cargotest::support::paths;
+use cargotest::support::git;
+use hamcrest::assert_that;
+
+use url::Url;
+
+fn find_index() -> PathBuf {
+ let dir = paths::home().join(".cargo/registry/index");
+ dir.read_dir().unwrap().next().unwrap().unwrap().path()
+}
+
+fn run_test(path_env: Option<&OsStr>) {
+ const N: usize = 50;
+
+ let foo = project("foo")
+ .file("Cargo.toml", r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#)
+ .file("src/lib.rs", "")
+ .build();
+ Package::new("bar", "0.1.0").publish();
+
+ assert_that(foo.cargo("build"),
+ execs().with_status(0));
+
+ let index = find_index();
+ let path = paths::home().join("tmp");
+ let url = Url::from_file_path(&path).unwrap().to_string();
+ let repo = git2::Repository::init(&path).unwrap();
+ let index = git2::Repository::open(&index).unwrap();
+ let mut cfg = repo.config().unwrap();
+ cfg.set_str("user.email", "foo@bar.com").unwrap();
+ cfg.set_str("user.name", "Foo Bar").unwrap();
+ let mut cfg = index.config().unwrap();
+ cfg.set_str("user.email", "foo@bar.com").unwrap();
+ cfg.set_str("user.name", "Foo Bar").unwrap();
+
+ for _ in 0..N {
+ git::commit(&repo);
+ index.remote_anonymous(&url).unwrap()
+ .fetch(&["refs/heads/master:refs/remotes/foo/master"],
+ None,
+ None).unwrap();
+ }
+ drop((repo, index));
+ Package::new("bar", "0.1.1").publish();
+
+ let before = find_index().join(".git/objects/pack")
+ .read_dir().unwrap()
+ .count();
+ assert!(before > N);
+
+ let mut cmd = foo.cargo("update");
+ cmd.env("__CARGO_PACKFILE_LIMIT", "10");
+ if let Some(path) = path_env {
+ cmd.env("PATH", path);
+ }
+ cmd.env("RUST_LOG", "trace");
+ assert_that(cmd, execs().with_status(0));
+ let after = find_index().join(".git/objects/pack")
+ .read_dir().unwrap()
+ .count();
+ assert!(after < before,
+ "packfiles before: {}\n\
+ packfiles after: {}", before, after);
+}
+
+#[test]
+fn use_git_gc() {
+ if Command::new("git").arg("--version").output().is_err() {
+ return
+ }
+ run_test(None);
+}
+
+#[test]
+// it looks like this test passes on some windows machines but not others,
+// notably not on AppVeyor's machines. Sounds like another bug for another day.
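+//
+// For illustration, the mechanism both tests in this file exercise: every
+// registry update fetches a packfile into
+// `.cargo/registry/index/*/.git/objects/pack`, and once the packfile count
+// exceeds the test-only `__CARGO_PACKFILE_LIMIT` override, Cargo repacks
+// the index, preferring `git gc` when a `git` binary is on PATH
+// (`use_git_gc` above) and falling back to an internal path when it is not
+// (this test). A rough sketch of the knob:
+//
+//     __CARGO_PACKFILE_LIMIT=10 cargo update   # then count the *.pack files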
+#[cfg_attr(windows, ignore)]
+fn avoid_using_git() {
+ let path = env::var_os("PATH").unwrap_or_default();
+ let mut paths = env::split_paths(&path).collect::<Vec<_>>();
+ let idx = paths.iter().position(|p| {
+ p.join("git").exists() || p.join("git.exe").exists()
+ });
+ match idx {
+ Some(i) => { paths.remove(i); }
+ None => return,
+ }
+ run_test(Some(&env::join_paths(&paths).unwrap()));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/test.rs b/collector/compile-benchmarks/cargo/tests/test.rs
new file mode 100644
index 000000000..9f48164f3
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/test.rs
@@ -0,0 +1,2937 @@
+extern crate cargo;
+extern crate cargotest;
+extern crate hamcrest;
+
+use std::fs::File;
+use std::io::prelude::*;
+use std::str;
+
+use cargotest::{sleep_ms, is_nightly, rustc_host};
+use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest, cargo_exe};
+use cargotest::support::paths::CargoPathExt;
+use cargotest::support::registry::Package;
+use hamcrest::{assert_that, existing_file, is_not};
+use cargo::util::process;
+
+#[test]
+fn cargo_test_simple() {
+ let p = project("foo")
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", r#"
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[test]
+ fn test_hello() {
+ assert_eq!(hello(), "hello")
+ }"#)
+ .build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+
+ assert_that(process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("hello\n"));
+
+ assert_that(p.cargo("test"),
+ execs().with_status(0).with_stderr(format!("\
+[COMPILING] foo v0.5.0 ({})
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url()))
+ .with_stdout_contains("test test_hello ... ok"));
+}
+
+#[test]
+fn cargo_test_release() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.1.0"
+
+ [dependencies]
+ bar = { path = "bar" }
+ "#)
+ .file("src/lib.rs", r#"
+ extern crate bar;
+ pub fn foo() { bar::bar(); }
+
+ #[test]
+ fn test() { foo(); }
+ "#)
+ .file("tests/test.rs", r#"
+ extern crate foo;
+
+ #[test]
+ fn test() { foo::foo(); }
+ "#)
+ .file("bar/Cargo.toml", r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ "#)
+ .file("bar/src/lib.rs", "pub fn bar() {}")
+ .build();
+
+ assert_that(p.cargo("test").arg("-v").arg("--release"),
+ execs().with_status(0).with_stderr(format!("\
+[COMPILING] bar v0.0.1 ({dir}/bar)
+[RUNNING] [..] -C opt-level=3 [..]
+[COMPILING] foo v0.1.0 ({dir})
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE]`
+[RUNNING] `[..]target[/]release[/]deps[/]test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [..]lib.rs[..]`", dir = p.url()))
+ .with_stdout_contains_n("test test ... 
ok", 2) + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn cargo_test_overflow_checks() { + if !is_nightly() { + return; + } + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.5.0" + authors = [] + + [[bin]] + name = "foo" + + [profile.release] + overflow-checks = true + "#) + .file("src/foo.rs", r#" + use std::panic; + pub fn main() { + let r = panic::catch_unwind(|| { + [1, i32::max_value()].iter().sum::(); + }); + assert!(r.is_err()); + }"#) + .build(); + + assert_that(p.cargo("build").arg("--release"), + execs().with_status(0)); + assert_that(&p.release_bin("foo"), existing_file()); + + assert_that(process(&p.release_bin("foo")), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn cargo_test_verbose() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r#" + fn main() {} + #[test] fn test_hello() {} + "#) + .build(); + + assert_that(p.cargo("test").arg("-v").arg("hello"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.5.0 ({url}) +[RUNNING] `rustc [..] src[/]main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]deps[/]foo-[..][EXE] hello`", url = p.url())) + .with_stdout_contains("test test_hello ... ok")); +} + +#[test] +fn many_similar_names() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + pub fn foo() {} + #[test] fn lib_test() {} + ") + .file("src/main.rs", " + extern crate foo; + fn main() {} + #[test] fn bin_test() { foo::foo() } + ") + .file("tests/foo.rs", r#" + extern crate foo; + #[test] fn test_test() { foo::foo() } + "#) + .build(); + + let output = p.cargo("test").arg("-v").exec_with_output().unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + assert!(output.contains("test bin_test"), "bin_test missing\n{}", output); + assert!(output.contains("test lib_test"), "lib_test missing\n{}", output); + assert!(output.contains("test test_test"), "test_test missing\n{}", output); +} + +#[test] +fn cargo_test_failing_test_in_bin() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r#" + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[test] + fn test_hello() { + assert_eq!(hello(), "nope") + }"#) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("hello\n")); + + assert_that(p.cargo("test"), + execs().with_stderr(format!("\ +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[ERROR] test failed, to rerun pass '--bin foo'", url = p.url())) + .with_stdout_contains(" +running 1 test +test test_hello ... 
FAILED + +failures: + +---- test_hello stdout ---- +thread 'test_hello' panicked at 'assertion failed:[..]") + .with_stdout_contains("[..]`(left == right)`[..]") + .with_stdout_contains("[..]left: `\"hello\"`,[..]") + .with_stdout_contains("[..]right: `\"nope\"`[..]") + .with_stdout_contains("[..]src[/]main.rs:12[..]") + .with_stdout_contains("\ +failures: + test_hello +") + .with_status(101)); +} + +#[test] +fn cargo_test_failing_test_in_test() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r#" + pub fn main() { + println!("hello"); + }"#) + .file("tests/footest.rs", r#" + #[test] + fn test_hello() { + assert!(false) + }"#) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), + execs().with_status(0).with_stdout("hello\n")); + + assert_that(p.cargo("test"), + execs().with_stderr(format!("\ +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]footest-[..][EXE] +[ERROR] test failed, to rerun pass '--test footest'", url = p.url())) + .with_stdout_contains("running 0 tests") + .with_stdout_contains("\ +running 1 test +test test_hello ... FAILED + +failures: + +---- test_hello stdout ---- +thread 'test_hello' panicked at 'assertion failed: false', \ + tests[/]footest.rs:4[..] +") + .with_stdout_contains("\ +failures: + test_hello +") + .with_status(101)); +} + +#[test] +fn cargo_test_failing_test_in_lib() { + let p = project("foo") + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", r#" + #[test] + fn test_hello() { + assert!(false) + }"#) + .build(); + + assert_that(p.cargo("test"), + execs().with_stderr(format!("\ +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[ERROR] test failed, to rerun pass '--lib'", url = p.url())) + .with_stdout_contains("\ +test test_hello ... FAILED + +failures: + +---- test_hello stdout ---- +thread 'test_hello' panicked at 'assertion failed: false', \ + src[/]lib.rs:4[..] +") + .with_stdout_contains("\ +failures: + test_hello +") + .with_status(101)); +} + + +#[test] +fn test_with_lib_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "baz" + path = "src/main.rs" + "#) + .file("src/lib.rs", r#" + /// + /// ```rust + /// extern crate foo; + /// fn main() { + /// println!("{:?}", foo::foo()); + /// } + /// ``` + /// + pub fn foo(){} + #[test] fn lib_test() {} + "#) + .file("src/main.rs", " + #[allow(unused_extern_crates)] + extern crate foo; + + fn main() {} + + #[test] + fn bin_test() {} + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]baz-[..][EXE] +[DOCTEST] foo", p.url())) + .with_stdout_contains("test lib_test ... ok") + .with_stdout_contains("test bin_test ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3)); +} + +#[test] +fn test_with_deep_lib_dep() { + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = "../foo" + "#) + .file("src/lib.rs", " + #[cfg(test)] + extern crate foo; + /// ``` + /// bar::bar(); + /// ``` + pub fn bar() {} + + #[test] + fn bar_test() { + foo::foo(); + } + ") + .build(); + let _p2 = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + pub fn foo() {} + + #[test] + fn foo_test() {} + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ([..]) +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[..] +[DOCTEST] bar", dir = p.url())) + .with_stdout_contains("test bar_test ... ok") + .with_stdout_contains_n("test [..] ... ok", 2)); +} + +#[test] +fn external_test_explicit() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[test]] + name = "test" + path = "src/test.rs" + "#) + .file("src/lib.rs", r#" + pub fn get_hello() -> &'static str { "Hello" } + + #[test] + fn internal_test() {} + "#) + .file("src/test.rs", r#" + extern crate foo; + + #[test] + fn external_test() { assert_eq!(foo::get_hello(), "Hello") } + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test-[..][EXE] +[DOCTEST] foo", p.url())) + .with_stdout_contains("test internal_test ... ok") + .with_stdout_contains("test external_test ... ok") + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn external_test_named_test() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[test]] + name = "test" + "#) + .file("src/lib.rs", "") + .file("tests/test.rs", r#" + #[test] + fn foo() { } + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0)) +} + +#[test] +fn external_test_implicit() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn get_hello() -> &'static str { "Hello" } + + #[test] + fn internal_test() {} + "#) + .file("tests/external.rs", r#" + extern crate foo; + + #[test] + fn external_test() { assert_eq!(foo::get_hello(), "Hello") } + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]external-[..][EXE] +[DOCTEST] foo", p.url())) + .with_stdout_contains("test internal_test ... ok") + .with_stdout_contains("test external_test ... 
ok") + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn dont_run_examples() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + "#) + .file("examples/dont-run-me-i-will-fail.rs", r#" + fn main() { panic!("Examples should not be run by 'cargo test'"); } + "#) + .build(); + assert_that(p.cargo("test"), + execs().with_status(0)); +} + +#[test] +fn pass_through_command_line() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + #[test] fn foo() {} + #[test] fn bar() {} + ") + .build(); + + assert_that(p.cargo("test").arg("bar"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", dir = p.url())) + .with_stdout_contains("test bar ... ok") + .with_stdout_contains("running 0 tests")); + + assert_that(p.cargo("test").arg("foo"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo") + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests")); +} + +// Regression test for running cargo-test twice with +// tests in an rlib +#[test] +fn cargo_test_twice() { + let p = project("test_twice") + .file("Cargo.toml", &basic_lib_manifest("test_twice")) + .file("src/test_twice.rs", r#" + #![crate_type = "rlib"] + + #[test] + fn dummy_test() { } + "#) + .build(); + + p.cargo("build"); + + for _ in 0..2 { + assert_that(p.cargo("test"), + execs().with_status(0)); + } +} + +#[test] +fn lib_bin_same_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + [[bin]] + name = "foo" + "#) + .file("src/lib.rs", " + #[test] fn lib_test() {} + ") + .file("src/main.rs", " + #[allow(unused_extern_crates)] + extern crate foo; + + #[test] + fn bin_test() {} + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", p.url())) + .with_stdout_contains_n("test [..] ... ok", 2) + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn lib_with_standard_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + /// ``` + /// syntax::foo(); + /// ``` + pub fn foo() {} + + #[test] + fn foo_test() {} + ") + .file("tests/test.rs", " + extern crate syntax; + + #[test] + fn test() { syntax::foo() } + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test-[..][EXE] +[DOCTEST] syntax", dir = p.url())) + .with_stdout_contains("test foo_test ... ok") + .with_stdout_contains("test test ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3)); +} + +#[test] +fn lib_with_standard_name2() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + name = "syntax" + test = false + doctest = false + "#) + .file("src/lib.rs", " + pub fn foo() {} + ") + .file("src/main.rs", " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]", dir = p.url())) + .with_stdout_contains("test test ... ok")); +} + +#[test] +fn lib_without_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + "#) + .file("src/lib.rs", " + pub fn foo() {} + ") + .file("src/main.rs", " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]", dir = p.url())) + .with_stdout_contains("test test ... ok")); +} + +#[test] +fn bin_without_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[bin]] + path = "src/main.rs" + "#) + .file("src/lib.rs", " + pub fn foo() {} + ") + .file("src/main.rs", " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + binary target bin.name is required")); +} + +#[test] +fn bench_without_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[bench]] + path = "src/bench.rs" + "#) + .file("src/lib.rs", " + pub fn foo() {} + ") + .file("src/main.rs", " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ") + .file("src/bench.rs", " + #![feature(test)] + extern crate syntax; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + benchmark target bench.name is required")); +} + +#[test] +fn test_without_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[test]] + path = "src/test.rs" + "#) + .file("src/lib.rs", r#" + pub fn foo() {} + pub fn get_hello() -> &'static str { "Hello" } + "#) + .file("src/main.rs", " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ") + .file("src/test.rs", r#" + extern crate syntax; + + #[test] + fn external_test() { assert_eq!(syntax::get_hello(), "Hello") } + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + test target test.name is required")); +} + +#[test] +fn example_without_name() { + let p = 
project("foo") + .file("Cargo.toml", r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[example]] + path = "examples/example.rs" + "#) + .file("src/lib.rs", " + pub fn foo() {} + ") + .file("src/main.rs", " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ") + .file("examples/example.rs", r#" + extern crate syntax; + + fn main() { + println!("example1"); + } + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(101) + .with_stderr("\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + example target example.name is required")); +} + +#[test] +fn bin_there_for_integration() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/main.rs", " + fn main() { std::process::exit(101); } + #[test] fn main_test() {} + ") + .file("tests/foo.rs", r#" + use std::process::Command; + #[test] + fn test_test() { + let status = Command::new("target/debug/foo").status().unwrap(); + assert_eq!(status.code(), Some(101)); + } + "#) + .build(); + + let output = p.cargo("test").arg("-v").exec_with_output().unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + assert!(output.contains("main_test ... ok"), "no main_test\n{}", output); + assert!(output.contains("test_test ... ok"), "no test_test\n{}", output); +} + +#[test] +fn test_dylib() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", r#" + extern crate bar as the_bar; + + pub fn bar() { the_bar::baz(); } + + #[test] + fn foo() { bar(); } + "#) + .file("tests/test.rs", r#" + extern crate foo as the_foo; + + #[test] + fn foo() { the_foo::bar(); } + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#) + .file("bar/src/lib.rs", " + pub fn baz() {} + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]", dir = p.url())) + .with_stdout_contains_n("test foo ... ok", 2)); + + p.root().move_into_the_past(); + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]") + .with_stdout_contains_n("test foo ... ok", 2)); +} + +#[test] +fn test_twice_with_build_cmd() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", " + #[test] + fn foo() {} + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", dir = p.url())) + .with_stdout_contains("test foo ... 
ok") + .with_stdout_contains("running 0 tests")); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo") + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn test_then_build() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + #[test] + fn foo() {} + ") + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", dir = p.url())) + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests")); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stdout("")); +} + +#[test] +fn test_no_run() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", " + #[test] + fn foo() { panic!() } + ") + .build(); + + assert_that(p.cargo("test").arg("--no-run"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url()))); +} + +#[test] +fn test_run_specific_bin_target() { + let prj = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="bin1" + path="src/bin1.rs" + + [[bin]] + name="bin2" + path="src/bin2.rs" + "#) + .file("src/bin1.rs", "#[test] fn test1() { }") + .file("src/bin2.rs", "#[test] fn test2() { }") + .build(); + + assert_that(prj.cargo("test").arg("--bin").arg("bin2"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]bin2-[..][EXE]", dir = prj.url())) + .with_stdout_contains("test test2 ... ok")); +} + +#[test] +fn test_run_implicit_bin_target() { + let prj = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#) + .file("src/mybin.rs", "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }") + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file("examples/myexm.rs", "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }") + .build(); + + assert_that(prj.cargo("test").arg("--bins"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE]", dir = prj.url())) + .with_stdout_contains("test test_in_bin ... 
ok")); +} + +#[test] +fn test_run_specific_test_target() { + let prj = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/a.rs", "fn main() { }") + .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }") + .file("tests/a.rs", "#[test] fn test_a() { }") + .file("tests/b.rs", "#[test] fn test_b() { }") + .build(); + + assert_that(prj.cargo("test").arg("--test").arg("b"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]", dir = prj.url())) + .with_stdout_contains("test test_b ... ok")); +} + +#[test] +fn test_run_implicit_test_target() { + let prj = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#) + .file("src/mybin.rs", "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }") + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file("examples/myexm.rs", "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }") + .build(); + + assert_that(prj.cargo("test").arg("--tests"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]mytest-[..][EXE]", dir = prj.url())) + .with_stdout_contains("test test_in_test ... ok")); +} + +#[test] +fn test_run_implicit_bench_target() { + let prj = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#) + .file("src/mybin.rs", "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }") + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file("examples/myexm.rs", "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }") + .build(); + + assert_that(prj.cargo("test").arg("--benches"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]mybench-[..][EXE]", dir = prj.url())) + .with_stdout_contains("test test_in_bench ... 
ok")); +} + +#[test] +fn test_run_implicit_example_target() { + let prj = project("foo") + .file("Cargo.toml" , r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#) + .file("src/mybin.rs", "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }") + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file("examples/myexm.rs", "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }") + .build(); + + assert_that(prj.cargo("test").arg("--examples"), + execs().with_status(0) + .with_stderr(format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", dir = prj.url()))); +} + +#[test] +fn test_no_harness() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "foo" + test = false + + [[test]] + name = "bar" + path = "foo.rs" + harness = false + "#) + .file("src/main.rs", "fn main() {}") + .file("foo.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("test").arg("--").arg("--nocapture"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]bar-[..][EXE] +", + dir = p.url()))); +} + +#[test] +fn selective_testing() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [lib] + name = "foo" + doctest = false + "#) + .file("src/lib.rs", "") + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + doctest = false + "#) + .file("d1/src/lib.rs", "") + .file("d1/src/main.rs", "#[allow(unused_extern_crates)] extern crate d1; fn main() {}") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [lib] + name = "d2" + doctest = false + "#) + .file("d2/src/lib.rs", "") + .file("d2/src/main.rs", "#[allow(unused_extern_crates)] extern crate d2; fn main() {}"); + let p = p.build(); + + println!("d1"); + assert_that(p.cargo("test").arg("-p").arg("d1"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] d1 v0.0.1 ({dir}/d1) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]", dir = p.url())) + .with_stdout_contains_n("running 0 tests", 2)); + + println!("d2"); + assert_that(p.cargo("test").arg("-p").arg("d2"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] d2 v0.0.1 ({dir}/d2) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]", dir = p.url())) + .with_stdout_contains_n("running 0 tests", 2)); + + println!("whole"); + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", dir = p.url())) + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn almost_cyclic_but_not_quite() { + let p = project("a") + .file("Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dev-dependencies.b] + path = "b" + [dev-dependencies.c] + path = "c" + "#) + .file("src/lib.rs", r#" + #[cfg(test)] extern crate b; + #[cfg(test)] extern crate c; + "#) + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = ".." + "#) + .file("b/src/lib.rs", r#" + #[allow(unused_extern_crates)] + extern crate a; + "#) + .file("c/Cargo.toml", r#" + [package] + name = "c" + version = "0.0.1" + authors = [] + "#) + .file("c/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(p.cargo("test"), + execs().with_status(0)); +} + +#[test] +fn build_then_selective_test() { + let p = project("a") + .file("Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies.b] + path = "b" + "#) + .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate b;") + .file("src/main.rs", r#" + #[allow(unused_extern_crates)] + extern crate b; + #[allow(unused_extern_crates)] + extern crate a; + fn main() {} + "#) + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + p.root().move_into_the_past(); + assert_that(p.cargo("test").arg("-p").arg("b"), + execs().with_status(0)); +} + +#[test] +fn example_dev_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies.bar] + path = "bar" + "#) + .file("src/lib.rs", r#" + "#) + .file("examples/e1.rs", r#" + extern crate bar; + fn main() { } + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", r#" + // make sure this file takes awhile to compile + macro_rules! f0( () => (1) ); + macro_rules! f1( () => ({(f0!()) + (f0!())}) ); + macro_rules! f2( () => ({(f1!()) + (f1!())}) ); + macro_rules! f3( () => ({(f2!()) + (f2!())}) ); + macro_rules! f4( () => ({(f3!()) + (f3!())}) ); + macro_rules! f5( () => ({(f4!()) + (f4!())}) ); + macro_rules! f6( () => ({(f5!()) + (f5!())}) ); + macro_rules! f7( () => ({(f6!()) + (f6!())}) ); + macro_rules! f8( () => ({(f7!()) + (f7!())}) ); + pub fn bar() { + f8!(); + } + "#) + .build(); + assert_that(p.cargo("test"), + execs().with_status(0)); + assert_that(p.cargo("run") + .arg("--example").arg("e1").arg("--release").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn selective_testing_with_docs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + "#) + .file("src/lib.rs", r#" + /// ``` + /// not valid rust + /// ``` + pub fn foo() {} + "#) + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + path = "d1.rs" + "#) + .file("d1/d1.rs", ""); + let p = p.build(); + + assert_that(p.cargo("test").arg("-p").arg("d1"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] d1 v0.0.1 ({dir}/d1) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]d1[..][EXE] +[DOCTEST] d1", dir = p.url())) + .with_stdout_contains_n("running 0 tests", 2)); +} + +#[test] +fn example_bin_same_name() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) + .file("examples/foo.rs", r#"fn main() { println!("example"); }"#) + .build(); + + assert_that(p.cargo("test").arg("--no-run").arg("-v"), + execs().with_status(0) + .with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", dir = p.url()))); + + assert_that(&p.bin("foo"), is_not(existing_file())); + assert_that(&p.bin("examples/foo"), existing_file()); + + assert_that(p.process(&p.bin("examples/foo")), + execs().with_status(0).with_stdout("example\n")); + + assert_that(p.cargo("run"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] [..]") + .with_stdout("\ +bin +")); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn test_with_example_twice() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) + .file("examples/foo.rs", r#"fn main() { println!("example"); }"#) + .build(); + + println!("first"); + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); + assert_that(&p.bin("examples/foo"), existing_file()); + println!("second"); + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); + assert_that(&p.bin("examples/foo"), existing_file()); +} + +#[test] +fn example_with_dev_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + test = false + doctest = false + + [dev-dependencies.a] + path = "a" + "#) + .file("src/lib.rs", "") + .file("examples/ex.rs", "#[allow(unused_extern_crates)] extern crate a; fn main() {}") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0) + .with_stderr("\ +[..] +[..] +[..] +[..] +[RUNNING] `rustc --crate-name ex [..] --extern a=[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn bin_is_preserved() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + println!("testing"); + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn bad_example() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("run").arg("--example").arg("foo"), + execs().with_status(101).with_stderr("\ +[ERROR] no example target named `foo` +")); + assert_that(p.cargo("run").arg("--bin").arg("foo"), + execs().with_status(101).with_stderr("\ +[ERROR] no bin target named `foo` +")); +} + +#[test] +fn doctest_feature() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + [features] + bar = [] + "#) + .file("src/lib.rs", r#" + /// ```rust + /// assert_eq!(foo::foo(), 1); + /// ``` + #[cfg(feature = "bar")] + pub fn foo() -> i32 { 1 } + "#) + .build(); + + assert_that(p.cargo("test").arg("--features").arg("bar"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo[..][EXE] +[DOCTEST] foo") + .with_stdout_contains("running 0 tests") + .with_stdout_contains("test [..] ... ok")); +} + +#[test] +fn dashes_to_underscores() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo-bar" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + /// ``` + /// assert_eq!(foo_bar::foo(), 1); + /// ``` + pub fn foo() -> i32 { 1 } + "#) + .build(); + + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn doctest_dev_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + b = { path = "b" } + "#) + .file("src/lib.rs", r#" + /// ``` + /// extern crate b; + /// ``` + pub fn foo() {} + "#) + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn filter_no_doc_tests() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + /// ``` + /// extern crate b; + /// ``` + pub fn foo() {} + "#) + .file("tests/foo.rs", "") + .build(); + + assert_that(p.cargo("test").arg("--test=foo"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]") + .with_stdout_contains("running 0 tests")); +} + +#[test] +fn dylib_doctest() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["rlib", "dylib"] + test = false + "#) + .file("src/lib.rs", r#" + /// ``` + /// foo::foo(); + /// ``` + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[DOCTEST] foo") + .with_stdout_contains("test [..] ... ok")); +} + +#[test] +fn dylib_doctest2() { + // can't doctest dylibs as they're statically linked together + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["dylib"] + test = false + "#) + .file("src/lib.rs", r#" + /// ``` + /// foo::foo(); + /// ``` + pub fn foo() {} + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0).with_stdout("")); +} + +#[test] +fn cyclic_dev_dep_doc_test() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + bar = { path = "bar" } + "#) + .file("src/lib.rs", r#" + //! ``` + //! extern crate bar; + //! ``` + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = ".." } + "#) + .file("bar/src/lib.rs", r#" + #[allow(unused_extern_crates)] + extern crate foo; + "#) + .build(); + assert_that(p.cargo("test"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[COMPILING] bar v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo[..][EXE] +[DOCTEST] foo") + .with_stdout_contains("running 0 tests") + .with_stdout_contains("test [..] ... ok")); +} + +#[test] +fn dev_dep_with_build_script() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + bar = { path = "bar" } + "#) + .file("src/lib.rs", "") + .file("examples/foo.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#) + .file("bar/src/lib.rs", "") + .file("bar/build.rs", "fn main() {}") + .build(); + assert_that(p.cargo("test"), + execs().with_status(0)); +} + +#[test] +fn no_fail_fast() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + pub fn add_one(x: i32) -> i32{ + x + 1 + } + + /// ```rust + /// use foo::sub_one; + /// assert_eq!(sub_one(101), 100); + /// ``` + pub fn sub_one(x: i32) -> i32{ + x - 1 + } + "#) + .file("tests/test_add_one.rs", r#" + extern crate foo; + use foo::*; + + #[test] + fn add_one_test() { + assert_eq!(add_one(1), 2); + } + + #[test] + fn fail_add_one_test() { + assert_eq!(add_one(1), 1); + } + "#) + .file("tests/test_sub_one.rs", r#" + extern crate foo; + use foo::*; + + #[test] + fn sub_one_test() { + assert_eq!(sub_one(1), 0); + } + "#) + .build(); + assert_that(p.cargo("test").arg("--no-fail-fast"), + execs().with_status(101) + .with_stderr_contains("\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test_add_one-[..][EXE]") + .with_stdout_contains("running 0 tests") + .with_stderr_contains("\ +[RUNNING] target[/]debug[/]deps[/]test_sub_one-[..][EXE] +[DOCTEST] foo") + .with_stdout_contains("test result: FAILED. [..]") + .with_stdout_contains("test sub_one_test ... ok") + .with_stdout_contains_n("test [..] ... ok", 3)); +} + +#[test] +fn test_multiple_packages() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [lib] + name = "foo" + doctest = false + "#) + .file("src/lib.rs", "") + .file("d1/Cargo.toml", r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + doctest = false + "#) + .file("d1/src/lib.rs", "") + .file("d2/Cargo.toml", r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [lib] + name = "d2" + doctest = false + "#) + .file("d2/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("test").arg("-p").arg("d1").arg("-p").arg("d2"), + execs().with_status(0) + .with_stderr_contains("\ +[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]") + .with_stderr_contains("\ +[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]") + .with_stdout_contains_n("running 0 tests", 2)); +} + +#[test] +fn bin_does_not_rebuild_tests() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("tests/foo.rs", ""); + let p = p.build(); + + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); + + sleep_ms(1000); + File::create(&p.root().join("src/main.rs")).unwrap() + .write_all(b"fn main() { 3; }").unwrap(); + + assert_that(p.cargo("test").arg("-v").arg("--no-run"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] src[/]main.rs [..]` +[RUNNING] `rustc [..] src[/]main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +} + +#[test] +fn selective_test_wonky_profile() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.release] + opt-level = 2 + + [dependencies] + a = { path = "a" } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("test").arg("-v").arg("--no-run").arg("--release") + .arg("-p").arg("foo").arg("-p").arg("a"), + execs().with_status(0)); +} + +#[test] +fn selective_test_optional_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a", optional = true } + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("test").arg("-v").arg("--no-run") + .arg("--features").arg("a").arg("-p").arg("a"), + execs().with_status(0).with_stderr("\ +[COMPILING] a v0.0.1 ([..]) +[RUNNING] `rustc [..] a[/]src[/]lib.rs [..]` +[RUNNING] `rustc [..] a[/]src[/]lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn only_test_docs() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("src/lib.rs", r#" + #[test] + fn foo() { + let a: u32 = "hello"; + } + + /// ``` + /// foo::bar(); + /// println!("ok"); + /// ``` + pub fn bar() { + } + "#) + .file("tests/foo.rs", "this is not rust"); + let p = p.build(); + + assert_that(p.cargo("test").arg("--doc"), + execs().with_status(0) + .with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[DOCTEST] foo") + .with_stdout_contains("test [..] ... ok")); +} + +#[test] +fn test_panic_abort_with_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [profile.dev] + panic = 'abort' + "#) + .file("src/lib.rs", r#" + extern crate bar; + + #[test] + fn foo() {} + "#) + .file("bar/Cargo.toml", r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#) + .file("bar/src/lib.rs", "") + .build(); + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn cfg_test_even_with_no_harness() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + harness = false + doctest = false + "#) + .file("src/lib.rs", r#" + #[cfg(test)] + fn main() { + println!("hello!"); + } + "#) + .build(); + assert_that(p.cargo("test").arg("-v"), + execs().with_status(0) + .with_stdout("hello!\n") + .with_stderr("\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]` +")); +} + +#[test] +fn panic_abort_multiple() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [profile.release] + panic = 'abort' + "#) + .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate a;") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + assert_that(p.cargo("test") + .arg("--release").arg("-v") + .arg("-p").arg("foo") + .arg("-p").arg("a"), + execs().with_status(0)); +} + +#[test] +fn pass_correct_cfgs_flags_to_rustdoc() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + default = ["feature_a/default"] + nightly = ["feature_a/nightly"] + + [dependencies.feature_a] + path = "libs/feature_a" + default-features = false + "#) + .file("src/lib.rs", r#" + #[cfg(test)] + mod tests { + #[test] + fn it_works() { + assert!(true); + } + } + "#) + .file("libs/feature_a/Cargo.toml", r#" + [package] + name = "feature_a" + version = "0.1.0" + authors = [] + + [features] + default = ["mock_serde_codegen"] + nightly = ["mock_serde_derive"] + + [dependencies] + mock_serde_derive = { path = "../mock_serde_derive", optional = true } + + [build-dependencies] + mock_serde_codegen = { path = "../mock_serde_codegen", optional = true } + "#) + .file("libs/feature_a/src/lib.rs", r#" + #[cfg(feature = "mock_serde_derive")] + const MSG: &'static str = "This is safe"; + + #[cfg(feature = "mock_serde_codegen")] + const MSG: &'static str = "This is risky"; + + pub fn get() -> &'static str { + MSG + } + "#) + .file("libs/mock_serde_derive/Cargo.toml", r#" + [package] + name = "mock_serde_derive" + version = "0.1.0" + authors = [] + 
"#) + .file("libs/mock_serde_derive/src/lib.rs", "") + .file("libs/mock_serde_codegen/Cargo.toml", r#" + [package] + name = "mock_serde_codegen" + version = "0.1.0" + authors = [] + "#) + .file("libs/mock_serde_codegen/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("test") + .arg("--package").arg("feature_a") + .arg("--verbose"), + execs().with_status(0) + .with_stderr_contains("\ +[DOCTEST] feature_a +[RUNNING] `rustdoc --test [..]mock_serde_codegen[..]`")); + + assert_that(p.cargo("test") + .arg("--verbose"), + execs().with_status(0) + .with_stderr_contains("\ +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]feature_a[..]`")); +} + +#[test] +fn test_release_ignore_panic() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [profile.test] + panic = 'abort' + [profile.release] + panic = 'abort' + "#) + .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate a;") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", ""); + let p = p.build(); + println!("test"); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); + println!("bench"); + assert_that(p.cargo("bench").arg("-v"), execs().with_status(0)); +} + +#[test] +fn test_many_with_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [features] + foo = [] + + [workspace] + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("test").arg("-v") + .arg("-p").arg("a") + .arg("-p").arg("foo") + .arg("--features").arg("foo"), + execs().with_status(0)); +} + +#[test] +fn test_all_workspace() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#) + .file("src/main.rs", r#" + #[test] + fn foo_test() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + #[test] + fn bar_test() {} + "#) + .build(); + + assert_that(p.cargo("test") + .arg("--all"), + execs().with_status(0) + .with_stdout_contains("test foo_test ... ok") + .with_stdout_contains("test bar_test ... ok")); +} + +#[test] +fn test_all_exclude() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + "#) + .file("bar/src/lib.rs", r#" + #[test] + pub fn bar() {} + "#) + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.1.0" + "#) + .file("baz/src/lib.rs", r#" + #[test] + pub fn baz() { + assert!(false); + } + "#) + .build(); + + assert_that(p.cargo("test") + .arg("--all") + .arg("--exclude") + .arg("baz"), + execs().with_status(0) + .with_stdout_contains("running 1 test +test bar ... 
ok")); +} + +#[test] +fn test_all_virtual_manifest() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["a", "b"] + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.1.0" + "#) + .file("a/src/lib.rs", r#" + #[test] + fn a() {} + "#) + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.1.0" + "#) + .file("b/src/lib.rs", r#" + #[test] + fn b() {} + "#) + .build(); + + assert_that(p.cargo("test") + .arg("--all"), + execs().with_status(0) + .with_stdout_contains("test a ... ok") + .with_stdout_contains("test b ... ok")); +} + +#[test] +fn test_virtual_manifest_all_implied() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["a", "b"] + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.1.0" + "#) + .file("a/src/lib.rs", r#" + #[test] + fn a() {} + "#) + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.1.0" + "#) + .file("b/src/lib.rs", r#" + #[test] + fn b() {} + "#) + .build(); + + assert_that(p.cargo("test"), + execs().with_status(0) + .with_stdout_contains("test a ... ok") + .with_stdout_contains("test b ... ok")); +} + +#[test] +fn test_all_member_dependency_same_name() { + let p = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["a"] + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + a = "0.1.0" + "#) + .file("a/src/lib.rs", r#" + #[test] + fn a() {} + "#) + .build(); + + Package::new("a", "0.1.0").publish(); + + assert_that(p.cargo("test") + .arg("--all"), + execs().with_status(0) + .with_stdout_contains("test a ... ok")); +} + +#[test] +fn doctest_only_with_dev_dep() { + let p = project("workspace") + .file("Cargo.toml", r#" + [project] + name = "a" + version = "0.1.0" + + [dev-dependencies] + b = { path = "b" } + "#) + .file("src/lib.rs", r#" + /// ``` + /// extern crate b; + /// + /// b::b(); + /// ``` + pub fn a() {} + "#) + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.1.0" + "#) + .file("b/src/lib.rs", r#" + pub fn b() {} + "#) + .build(); + + assert_that(p.cargo("test").arg("--doc").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn test_many_targets() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("src/bin/a.rs", r#" + fn main() {} + #[test] fn bin_a() {} + "#) + .file("src/bin/b.rs", r#" + fn main() {} + #[test] fn bin_b() {} + "#) + .file("src/bin/c.rs", r#" + fn main() {} + #[test] fn bin_c() { panic!(); } + "#) + .file("examples/a.rs", r#" + fn main() {} + #[test] fn example_a() {} + "#) + .file("examples/b.rs", r#" + fn main() {} + #[test] fn example_b() {} + "#) + .file("examples/c.rs", r#" + #[test] fn example_c() { panic!(); } + "#) + .file("tests/a.rs", r#" + #[test] fn test_a() {} + "#) + .file("tests/b.rs", r#" + #[test] fn test_b() {} + "#) + .file("tests/c.rs", r#" + does not compile + "#) + .build(); + + assert_that(p.cargo("test").arg("--verbose") + .arg("--bin").arg("a").arg("--bin").arg("b") + .arg("--example").arg("a").arg("--example").arg("b") + .arg("--test").arg("a").arg("--test").arg("b"), + execs() + .with_status(0) + .with_stdout_contains("test bin_a ... ok") + .with_stdout_contains("test bin_b ... ok") + .with_stdout_contains("test test_a ... ok") + .with_stdout_contains("test test_b ... 
ok") + .with_stderr_contains("[RUNNING] `rustc --crate-name a examples[/]a.rs [..]`") + .with_stderr_contains("[RUNNING] `rustc --crate-name b examples[/]b.rs [..]`")) +} + +#[test] +fn doctest_and_registry() { + let p = project("workspace") + .file("Cargo.toml", r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + b = { path = "b" } + c = { path = "c" } + + [workspace] + "#) + .file("src/lib.rs", "") + .file("b/Cargo.toml", r#" + [project] + name = "b" + version = "0.1.0" + "#) + .file("b/src/lib.rs", " + /// ``` + /// b::foo(); + /// ``` + pub fn foo() {} + ") + .file("c/Cargo.toml", r#" + [project] + name = "c" + version = "0.1.0" + + [dependencies] + b = "0.1" + "#) + .file("c/src/lib.rs", "") + .build(); + + Package::new("b", "0.1.0").publish(); + + assert_that(p.cargo("test").arg("--all").arg("-v"), + execs().with_status(0)); +} + +#[test] +fn cargo_test_env() { + let src = format!(r#" + #![crate_type = "rlib"] + + #[test] + fn env_test() {{ + use std::env; + println!("{{}}", env::var("{}").unwrap()); + }} + "#, cargo::CARGO_ENV); + + let p = project("env_test") + .file("Cargo.toml", &basic_lib_manifest("env_test")) + .file("src/lib.rs", &src) + .build(); + + let mut pr = p.cargo("test"); + let cargo = cargo_exe().canonicalize().unwrap(); + assert_that(pr.args(&["--lib", "--", "--nocapture"]), + execs().with_status(0) + .with_stdout_contains(format!("\ +{} +test env_test ... ok +", cargo.to_str().unwrap()))); +} + +#[test] +fn test_order() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + "#) + .file("src/lib.rs", r#" + #[test] fn test_lib() {} + "#) + .file("tests/a.rs", r#" + #[test] fn test_a() {} + "#) + .file("tests/z.rs", r#" + #[test] fn test_z() {} + "#) + .build(); + + assert_that(p.cargo("test").arg("--all"), + execs().with_status(0) + .with_stdout_contains(" +running 1 test +test test_lib ... ok + +test result: ok. [..] + + +running 1 test +test test_a ... ok + +test result: ok. [..] + + +running 1 test +test test_z ... ok + +test result: ok. [..] +")); + +} + +#[test] +fn cyclic_dev() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [dev-dependencies] + foo = { path = "." } + "#) + .file("src/lib.rs", r#" + #[test] fn test_lib() {} + "#) + .file("tests/foo.rs", r#" + extern crate foo; + "#) + .build(); + + assert_that(p.cargo("test").arg("--all"), + execs().with_status(0)); +} + +#[test] +fn publish_a_crate_without_tests() { + Package::new("testless", "0.1.0") + .file("Cargo.toml", r#" + [project] + name = "testless" + version = "0.1.0" + exclude = ["tests/*"] + + [[test]] + name = "a_test" + "#) + .file("src/lib.rs", "") + + // In real life, the package will have a test, + // which would be excluded from .crate file by the + // `exclude` field. Our test harness does not honor + // exclude though, so let's just not add the file! 
+ // .file("tests/a_test.rs", "") + + .publish(); + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + testless = "0.1.0" + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("test"), execs().with_status(0)); + assert_that(p.cargo("test").arg("--package").arg("testless"), + execs().with_status(0)); +} + +#[test] +fn find_dependency_of_proc_macro_dependency_with_target() { + let workspace = project("workspace") + .file("Cargo.toml", r#" + [workspace] + members = ["root", "proc_macro_dep"] + "#) + .file("root/Cargo.toml", r#" + [project] + name = "root" + version = "0.1.0" + authors = [] + + [dependencies] + proc_macro_dep = { path = "../proc_macro_dep" } + "#) + .file("root/src/lib.rs", r#" + #[macro_use] + extern crate proc_macro_dep; + + #[derive(Noop)] + pub struct X; + "#) + .file("proc_macro_dep/Cargo.toml", r#" + [project] + name = "proc_macro_dep" + version = "0.1.0" + authors = [] + + [lib] + proc-macro = true + + [dependencies] + bar = "^0.1" + "#) + .file("proc_macro_dep/src/lib.rs", r#" + extern crate bar; + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#) + .build(); + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0") + .dep("foo", "0.1") + .file("src/lib.rs", "extern crate foo;") + .publish(); + assert_that(workspace.cargo("test").arg("--all").arg("--target").arg(rustc_host()), + execs().with_status(0)); +} + +#[test] +fn test_hint_not_masked_by_doctest() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/lib.rs", r#" + /// ``` + /// assert_eq!(1, 1); + /// ``` + pub fn this_works() {} + "#) + .file("tests/integ.rs", r#" + #[test] + fn this_fails() { + panic!(); + } + "#) + .build(); + assert_that(p.cargo("test") + .arg("--no-fail-fast"), + execs() + .with_status(101) + .with_stdout_contains("test this_fails ... FAILED") + .with_stdout_contains("[..]this_works (line [..]ok") + .with_stderr_contains("[ERROR] test failed, to rerun pass \ + '--test integ'")); +} diff --git a/collector/compile-benchmarks/cargo/tests/tool-paths.rs b/collector/compile-benchmarks/cargo/tests/tool-paths.rs new file mode 100644 index 000000000..0d3b5d299 --- /dev/null +++ b/collector/compile-benchmarks/cargo/tests/tool-paths.rs @@ -0,0 +1,173 @@ +extern crate cargotest; +extern crate hamcrest; + +use cargotest::rustc_host; +use cargotest::support::{path2url, project, execs}; +use hamcrest::assert_that; + +#[test] +fn pathless_tools() { + let target = rustc_host(); + + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", &format!(r#" + [target.{}] + ar = "nonexistent-ar" + linker = "nonexistent-linker" + "#, target)) + .build(); + + assert_that(foo.cargo("build").arg("--verbose"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", url = foo.url()))) +} + +#[test] +fn absolute_tools() { + let target = rustc_host(); + + // Escaped as they appear within a TOML config file + let config = if cfg!(windows) { + (r#"C:\\bogus\\nonexistent-ar"#, r#"C:\\bogus\\nonexistent-linker"#) + } else { + (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) + }; + + let foo = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", &format!(r#" + [target.{target}] + ar = "{ar}" + linker = "{linker}" + "#, target = target, ar = config.0, linker = config.1)) + .build(); + + let output = if cfg!(windows) { + (r#"C:\bogus\nonexistent-ar"#, r#"C:\bogus\nonexistent-linker"#) + } else { + (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) + }; + + assert_that(foo.cargo("build").arg("--verbose"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc [..] -C ar={ar} -C linker={linker} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", url = foo.url(), ar = output.0, linker = output.1))) +} + +#[test] +fn relative_tools() { + let target = rustc_host(); + + // Escaped as they appear within a TOML config file + let config = if cfg!(windows) { + (r#".\\nonexistent-ar"#, r#".\\tools\\nonexistent-linker"#) + } else { + (r#"./nonexistent-ar"#, r#"./tools/nonexistent-linker"#) + }; + + // Funky directory structure to test that relative tool paths are made absolute + // by reference to the `.cargo/..` directory and not to (for example) the CWD. + let origin = project("origin") + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + "#) + .file("foo/src/lib.rs", "") + .file(".cargo/config", &format!(r#" + [target.{target}] + ar = "{ar}" + linker = "{linker}" + "#, target = target, ar = config.0, linker = config.1)) + .build(); + + let foo_path = origin.root().join("foo"); + let foo_url = path2url(foo_path.clone()); + let prefix = origin.root().into_os_string().into_string().unwrap(); + let output = if cfg!(windows) { + (format!(r#"{}\.\nonexistent-ar"#, prefix), + format!(r#"{}\.\tools\nonexistent-linker"#, prefix)) + } else { + (format!(r#"{}/./nonexistent-ar"#, prefix), + format!(r#"{}/./tools/nonexistent-linker"#, prefix)) + }; + + assert_that(origin.cargo("build").cwd(foo_path).arg("--verbose"), + execs().with_stderr(&format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc [..] -C ar={ar} -C linker={linker} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", url = foo_url, ar = output.0, linker = output.1))) +} + +#[test] +fn custom_runner() { + let target = rustc_host(); + + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + "#) + .file("src/main.rs", "fn main() {}") + .file("tests/test.rs", "") + .file("benches/bench.rs", "") + .file(".cargo/config", &format!(r#" + [target.{}] + runner = "nonexistent-runner -r" + "#, target)) + .build(); + + assert_that(p.cargo("run").args(&["--", "--param"]), + execs().with_stderr_contains(&format!("\ +[COMPILING] foo v0.0.1 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `nonexistent-runner -r target[/]debug[/]foo[EXE] --param`
+", url = p.url())));
+
+    assert_that(p.cargo("test").args(&["--test", "test", "--verbose", "--", "--param"]),
+                execs().with_stderr_contains(&format!("\
+[COMPILING] foo v0.0.1 ({url})
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r [..][/]target[/]debug[/]deps[/]test-[..][EXE] --param`
+", url = p.url())));
+
+    assert_that(p.cargo("bench").args(&["--bench", "bench", "--verbose", "--", "--param"]),
+                execs().with_stderr_contains(&format!("\
+[COMPILING] foo v0.0.1 ({url})
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `nonexistent-runner -r [..][/]target[/]release[/]deps[/]bench-[..][EXE] --param --bench`
+", url = p.url())));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/verify-project.rs b/collector/compile-benchmarks/cargo/tests/verify-project.rs
new file mode 100644
index 000000000..509367ad8
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/verify-project.rs
@@ -0,0 +1,50 @@
+extern crate cargotest;
+extern crate hamcrest;
+
+use cargotest::support::{project, execs, main_file, basic_bin_manifest};
+use hamcrest::{assert_that};
+
+fn verify_project_success_output() -> String {
+    r#"{"success":"true"}"#.into()
+}
+
+#[test]
+fn cargo_verify_project_path_to_cargo_toml_relative() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    assert_that(p.cargo("verify-project")
+                 .arg("--manifest-path").arg("foo/Cargo.toml")
+                 .cwd(p.root().parent().unwrap()),
+                execs().with_status(0)
+                       .with_stdout(verify_project_success_output()));
+}
+
+#[test]
+fn cargo_verify_project_path_to_cargo_toml_absolute() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    assert_that(p.cargo("verify-project")
+                 .arg("--manifest-path").arg(p.root().join("Cargo.toml"))
+                 .cwd(p.root().parent().unwrap()),
+                execs().with_status(0)
+                       .with_stdout(verify_project_success_output()));
+}
+
+#[test]
+fn cargo_verify_project_cwd() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    assert_that(p.cargo("verify-project")
+                 .cwd(p.root()),
+                execs().with_status(0)
+                       .with_stdout(verify_project_success_output()));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/version.rs b/collector/compile-benchmarks/cargo/tests/version.rs
new file mode 100644
index 000000000..a63d8700b
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/version.rs
@@ -0,0 +1,50 @@
+extern crate cargo;
+extern crate cargotest;
+extern crate hamcrest;
+
+use cargotest::support::{project, execs};
+use hamcrest::assert_that;
+
+#[test]
+fn simple() {
+    let p = project("foo").build();
+
+    assert_that(p.cargo("version"),
+                execs().with_status(0).with_stdout(&format!("{}\n",
+                cargo::version())));
+
+    assert_that(p.cargo("--version"),
+                execs().with_status(0).with_stdout(&format!("{}\n",
+                cargo::version())));
+
+}
+
+
+#[test]
+#[cfg_attr(target_os = "windows", ignore)]
+fn version_works_without_rustc() {
+    let p = project("foo").build();
+    assert_that(p.cargo("version").env("PATH", ""),
+                execs().with_status(0));
+}
+
+#[test]
+fn version_works_with_bad_config() {
+    let p = project("foo")
+        .file(".cargo/config", "this is not toml")
+        .build();
+    assert_that(p.cargo("version"),
+                execs().with_status(0));
+}
+
+#[test]
+fn version_works_with_bad_target_dir() {
+    let p = project("foo")
+        .file(".cargo/config", r#"
+            [build]
+            target-dir = 4
+        "#)
+        .build();
+    assert_that(p.cargo("version"),
+                execs().with_status(0));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/warn-on-failure.rs b/collector/compile-benchmarks/cargo/tests/warn-on-failure.rs
new file mode 100644
index 000000000..bbd418df6
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/warn-on-failure.rs
@@ -0,0 +1,93 @@
+extern crate cargotest;
+extern crate hamcrest;
+
+use cargotest::support::{project, execs, Project};
+use cargotest::support::registry::Package;
+use hamcrest::assert_that;
+
+static WARNING1: &'static str = "Hello! I'm a warning. :)";
+static WARNING2: &'static str = "And one more!";
+
+fn make_lib(lib_src: &str) {
+    Package::new("foo", "0.0.1")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+            build = "build.rs"
+        "#)
+        .file("build.rs", &format!(r#"
+            fn main() {{
+                use std::io::Write;
+                println!("cargo:warning={{}}", "{}");
+                println!("hidden stdout");
+                write!(&mut ::std::io::stderr(), "hidden stderr");
+                println!("cargo:warning={{}}", "{}");
+            }}
+        "#, WARNING1, WARNING2))
+        .file("src/lib.rs", &format!("fn f() {{ {} }}", lib_src))
+        .publish();
+}
+
+fn make_upstream(main_src: &str) -> Project {
+    project("bar")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "*"
+        "#)
+        .file("src/main.rs", &format!("fn main() {{ {} }}", main_src))
+        .build()
+}
+
+#[test]
+fn no_warning_on_success() {
+    make_lib("");
+    let upstream = make_upstream("");
+    assert_that(upstream.cargo("build"),
+                execs().with_status(0)
+                       .with_stderr("\
+[UPDATING] registry `[..]`
+[DOWNLOADING] foo v0.0.1 ([..])
+[COMPILING] foo v0.0.1
+[COMPILING] bar v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+"));
+}
+
+#[test]
+fn no_warning_on_bin_failure() {
+    make_lib("");
+    let upstream = make_upstream("hi()");
+    assert_that(upstream.cargo("build"),
+                execs().with_status(101)
+                       .with_stdout_does_not_contain("hidden stdout")
+                       .with_stderr_does_not_contain("hidden stderr")
+                       .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1))
+                       .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2))
+                       .with_stderr_contains("[UPDATING] registry `[..]`")
+                       .with_stderr_contains("[DOWNLOADING] foo v0.0.1 ([..])")
+                       .with_stderr_contains("[COMPILING] foo v0.0.1")
+                       .with_stderr_contains("[COMPILING] bar v0.0.1 ([..])"));
+}
+
+#[test]
+fn warning_on_lib_failure() {
+    make_lib("err()");
+    let upstream = make_upstream("");
+    assert_that(upstream.cargo("build"),
+                execs().with_status(101)
+                       .with_stdout_does_not_contain("hidden stdout")
+                       .with_stderr_does_not_contain("hidden stderr")
+                       .with_stderr_does_not_contain("[COMPILING] bar v0.0.1 ([..])")
+                       .with_stderr_contains("[UPDATING] registry `[..]`")
+                       .with_stderr_contains("[DOWNLOADING] foo v0.0.1 ([..])")
+                       .with_stderr_contains("[COMPILING] foo v0.0.1")
+                       .with_stderr_contains(&format!("[WARNING] {}", WARNING1))
+                       .with_stderr_contains(&format!("[WARNING] {}", WARNING2)));
+}
diff --git a/collector/compile-benchmarks/cargo/tests/workspaces.rs b/collector/compile-benchmarks/cargo/tests/workspaces.rs
new file mode 100644
index 000000000..093c9d1de
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/tests/workspaces.rs
@@ -0,0 +1,1654 @@
+#[macro_use]
+extern crate cargotest;
+extern crate hamcrest;
+
+use std::io::{Read, Write};
+use std::fs::File;
+
+use cargotest::sleep_ms;
+use cargotest::support::{project, execs, git};
+use cargotest::support::registry::Package;
+use hamcrest::{assert_that, existing_file, existing_dir, is_not};
+
+#[test]
+fn simple_explicit() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+ "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn inferred_root() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn inferred_path_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn transitive_path_dep() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = { path = "../baz" } + "#) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", "") + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + "#) + .file("baz/src/main.rs", "fn main() {}") + .file("baz/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + assert_that(&p.bin("baz"), is_not(existing_file())); + + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.bin("baz"), is_not(existing_file())); + + assert_that(p.cargo("build").cwd(p.root().join("baz")), + 
execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.bin("baz"), existing_file()); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("baz/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn parent_pointer_works() { + let p = project("foo") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "../bar" } + + [workspace] + "#) + .file("foo/src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../foo" + "#) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("foo")), + execs().with_status(0)); + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0)); + assert_that(&p.root().join("foo/Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn same_names_in_workspace() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = ".." + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: two packages named `foo` in this workspace: +- [..]Cargo.toml +- [..]Cargo.toml +")); +} + +#[test] +fn parent_doesnt_point_to_child() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(101) + .with_stderr("\ +error: current package believes it's in a workspace when it's not: +current: [..]Cargo.toml +workspace: [..]Cargo.toml + +this may be fixable [..] +")); +} + +#[test] +fn invalid_parent_pointer() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "foo" + "#) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to read `[..]Cargo.toml` + +Caused by: + [..] +")); +} + +#[test] +fn invalid_members() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["foo"] + "#) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to read `[..]Cargo.toml` + +Caused by: + [..] 
+")); +} + +#[test] +fn bare_workspace_ok() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn two_roots() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [workspace] + members = [".."] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: multiple workspace roots found in the same workspace: + [..] + [..] +")); +} + +#[test] +fn workspace_isnt_root() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "bar" + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: root of a workspace inferred but wasn't a root: [..] +")); +} + +#[test] +fn dangling_member() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../baz" + "#) + .file("bar/src/main.rs", "fn main() {}") + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + workspace = "../baz" + "#) + .file("baz/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: package `[..]` is a member of the wrong workspace +expected: [..] +actual: [..] +")); +} + +#[test] +fn cycle() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "bar" + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101)); +} + +#[test] +fn share_dependencies() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "0.1" + + [workspace] + members = ["bar"] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "< 0.1.5" + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.3").publish(); + Package::new("dep1", "0.1.8").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] dep1 v0.1.3 ([..]) +[COMPILING] dep1 v0.1.3 +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn fetch_fetches_all() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "*" + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.3").publish(); + + assert_that(p.cargo("fetch"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] dep1 v0.1.3 ([..]) +")); +} + +#[test] +fn lock_works_for_everyone() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep2 = "0.1" + + [workspace] + members = ["bar"] + "#) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "0.1" + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.0").publish(); + Package::new("dep2", "0.1.0").publish(); + + assert_that(p.cargo("generate-lockfile"), + execs().with_status(0) + .with_stderr("\ +[UPDATING] registry `[..]` +")); + + Package::new("dep1", "0.1.1").publish(); + Package::new("dep2", "0.1.1").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0) + .with_stderr("\ +[DOWNLOADING] dep2 v0.1.0 ([..]) +[COMPILING] dep2 v0.1.0 +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0) + .with_stderr("\ +[DOWNLOADING] dep1 v0.1.0 ([..]) +[COMPILING] dep1 v0.1.0 +[COMPILING] bar v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+")); +} + +#[test] +fn virtual_works() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["bar"] + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0)); + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn explicit_package_argument_works_with_virtual_manifest() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["bar"] + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that(p.cargo("build").cwd(p.root()).args(&["--package", "bar"]), + execs().with_status(0)); + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn virtual_misconfigure() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(101) + .with_stderr("\ +error: current package believes it's in a workspace when it's not: +current: [..]bar[..]Cargo.toml +workspace: [..]Cargo.toml + +this may be fixable by adding `bar` to the `workspace.members` array of the \ +manifest located at: [..] +")); +} + +#[test] +fn virtual_build_all_implied() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["bar"] + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that(p.cargo("build"), + execs().with_status(0)); +} + +#[test] +fn virtual_build_no_members() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + "#); + let p = p.build(); + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: manifest path `[..]` contains no package: The manifest is virtual, \ +and the workspace has no members. +")); +} + +#[test] +fn include_virtual() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + [workspace] + members = ["bar"] + "#) + .file("src/main.rs", "") + .file("bar/Cargo.toml", r#" + [workspace] + "#); + let p = p.build(); + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: multiple workspace roots found in the same workspace: + [..] + [..] 
+")); +} + +#[test] +fn members_include_path_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["p1"] + + [dependencies] + p3 = { path = "p3" } + "#) + .file("src/lib.rs", "") + .file("p1/Cargo.toml", r#" + [project] + name = "p1" + version = "0.1.0" + authors = [] + + [dependencies] + p2 = { path = "../p2" } + "#) + .file("p1/src/lib.rs", "") + .file("p2/Cargo.toml", r#" + [project] + name = "p2" + version = "0.1.0" + authors = [] + "#) + .file("p2/src/lib.rs", "") + .file("p3/Cargo.toml", r#" + [project] + name = "p3" + version = "0.1.0" + authors = [] + "#) + .file("p3/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("p1")), + execs().with_status(0)); + assert_that(p.cargo("build").cwd(p.root().join("p2")), + execs().with_status(0)); + assert_that(p.cargo("build").cwd(p.root().join("p3")), + execs().with_status(0)); + assert_that(p.cargo("build"), + execs().with_status(0)); + + assert_that(&p.root().join("target"), existing_dir()); + assert_that(&p.root().join("p1/target"), is_not(existing_dir())); + assert_that(&p.root().join("p2/target"), is_not(existing_dir())); + assert_that(&p.root().join("p3/target"), is_not(existing_dir())); +} + +#[test] +fn new_warns_you_this_will_not_work() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#) + .file("src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("new").arg("--lib").arg("bar").env("USER", "foo"), + execs().with_status(0) + .with_stderr("\ +warning: compiling this new crate may not work due to invalid workspace \ +configuration + +current package believes it's in a workspace when it's not: +current: [..] +workspace: [..] + +this may be fixable by ensuring that this crate is depended on by the workspace \ +root: [..] 
+[CREATED] library `bar` project +")); +} + +#[test] +fn lock_doesnt_change_depending_on_crate() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ['baz'] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", "") + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("baz/src/lib.rs", ""); + let p = p.build(); + + Package::new("foo", "1.0.0").publish(); + Package::new("bar", "1.0.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let mut lockfile = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile)); + + assert_that(p.cargo("build").cwd(p.root().join("baz")), + execs().with_status(0)); + + let mut lockfile2 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile2)); + + assert_eq!(lockfile, lockfile2); +} + +#[test] +fn rebuild_please() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ['lib', 'bin'] + "#) + .file("lib/Cargo.toml", r#" + [package] + name = "lib" + version = "0.1.0" + "#) + .file("lib/src/lib.rs", r#" + pub fn foo() -> u32 { 0 } + "#) + .file("bin/Cargo.toml", r#" + [package] + name = "bin" + version = "0.1.0" + + [dependencies] + lib = { path = "../lib" } + "#) + .file("bin/src/main.rs", r#" + extern crate lib; + + fn main() { + assert_eq!(lib::foo(), 0); + } + "#); + let p = p.build(); + + assert_that(p.cargo("run").cwd(p.root().join("bin")), + execs().with_status(0)); + + sleep_ms(1000); + + t!(t!(File::create(p.root().join("lib/src/lib.rs"))).write_all(br#" + pub fn foo() -> u32 { 1 } + "#)); + + assert_that(p.cargo("build").cwd(p.root().join("lib")), + execs().with_status(0)); + + assert_that(p.cargo("run").cwd(p.root().join("bin")), + execs().with_status(101)); +} + +#[test] +fn workspace_in_git() { + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", r#" + [workspace] + members = ["foo"] + "#) + .file("foo/Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + "#) + .file("foo/src/lib.rs", "") + }).unwrap(); + let p = project("foo") + .file("Cargo.toml", &format!(r#" + [package] + name = "lib" + version = "0.1.0" + + [dependencies.foo] + git = '{}' + "#, git_project.url())) + .file("src/lib.rs", r#" + pub fn foo() -> u32 { 0 } + "#); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); +} + + +#[test] +fn lockfile_can_specify_nonexistant_members() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["a"] + "#) + .file("a/Cargo.toml", r#" + [project] + name = "a" + version = "0.1.0" + authors = [] + "#) + .file("a/src/main.rs", "fn main() {}") + .file("Cargo.lock", r#" + [[package]] + name = "a" + version = "0.1.0" + + [[package]] + name = "b" + version = "0.1.0" + "#); + + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("a")), execs().with_status(0)); +} + +#[test] +fn you_cannot_generate_lockfile_for_empty_workspaces() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + "#) + .file("bar/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("update"), + execs().with_status(101) + .with_stderr("\ +error: you can't generate a lockfile for an empty workspace. 
+")); +} + +#[test] +fn workspace_with_transitive_dev_deps() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = ["mbrubeck@example.com"] + + [dependencies.bar] + path = "bar" + + [workspace] + "#) + .file("src/main.rs", r#"fn main() {}"#) + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = ["mbrubeck@example.com"] + + [dev-dependencies.baz] + path = "../baz" + "#) + .file("bar/src/lib.rs", r#" + pub fn init() {} + + #[cfg(test)] + + #[test] + fn test() { + extern crate baz; + baz::do_stuff(); + } + "#) + .file("baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.5.0" + authors = ["mbrubeck@example.com"] + "#) + .file("baz/src/lib.rs", r#"pub fn do_stuff() {}"#); + let p = p.build(); + + assert_that(p.cargo("test").args(&["-p", "bar"]), + execs().with_status(0)); +} + +#[test] +fn error_if_parent_cargo_toml_is_invalid() { + let p = project("foo") + .file("Cargo.toml", "Totally not a TOML file") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(101) + .with_stderr_contains("\ +[ERROR] failed to parse manifest at `[..]`")); +} + +#[test] +fn relative_path_for_member_works() { + let p = project("foo") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["../bar"] + "#) + .file("foo/src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../foo" + "#) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("foo")), execs().with_status(0)); + assert_that(p.cargo("build").cwd(p.root().join("bar")), execs().with_status(0)); +} + +#[test] +fn relative_path_for_root_works() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + + [dependencies] + subproj = { path = "./subproj" } + "#) + .file("src/main.rs", "fn main() {}") + .file("subproj/Cargo.toml", r#" + [project] + name = "subproj" + version = "0.1.0" + authors = [] + "#) + .file("subproj/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root()) + .arg("--manifest-path").arg("./Cargo.toml"), + execs().with_status(0)); + + assert_that(p.cargo("build").cwd(p.root().join("subproj")) + .arg("--manifest-path").arg("../Cargo.toml"), + execs().with_status(0)); +} + +#[test] +fn path_dep_outside_workspace_is_not_member() { + let p = project("foo") + .file("ws/Cargo.toml", r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "../foo" } + + [workspace] + "#) + .file("ws/src/lib.rs", r"extern crate foo;") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("ws")), + execs().with_status(0)); +} + +#[test] +fn test_in_and_out_of_workspace() { + let p = project("foo") + .file("ws/Cargo.toml", r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "../foo" } + + [workspace] + members = [ "../bar" ] + "#) + .file("ws/src/lib.rs", r"extern crate foo; pub fn f() { foo::f() }") + .file("foo/Cargo.toml", 
r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "../bar" } + "#) + .file("foo/src/lib.rs", "extern crate bar; pub fn f() { bar::f() }") + .file("bar/Cargo.toml", r#" + [project] + workspace = "../ws" + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("bar/src/lib.rs", "pub fn f() { }"); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("ws")), + execs().with_status(0)); + + assert_that(&p.root().join("ws/Cargo.lock"), existing_file()); + assert_that(&p.root().join("ws/target"), existing_dir()); + assert_that(&p.root().join("foo/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("foo/target"), is_not(existing_dir())); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("bar/target"), is_not(existing_dir())); + + assert_that(p.cargo("build").cwd(p.root().join("foo")), + execs().with_status(0)); + assert_that(&p.root().join("foo/Cargo.lock"), existing_file()); + assert_that(&p.root().join("foo/target"), existing_dir()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("bar/target"), is_not(existing_dir())); +} + +#[test] +fn test_path_dependency_under_member() { + let p = project("foo") + .file("ws/Cargo.toml", r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "../foo" } + + [workspace] + "#) + .file("ws/src/lib.rs", r"extern crate foo; pub fn f() { foo::f() }") + .file("foo/Cargo.toml", r#" + [project] + workspace = "../ws" + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "./bar" } + "#) + .file("foo/src/lib.rs", "extern crate bar; pub fn f() { bar::f() }") + .file("foo/bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("foo/bar/src/lib.rs", "pub fn f() { }"); + let p = p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("ws")), + execs().with_status(0)); + + assert_that(&p.root().join("foo/bar/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir())); + + assert_that(p.cargo("build").cwd(p.root().join("foo/bar")), + execs().with_status(0)); + + assert_that(&p.root().join("foo/bar/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir())); +} + +#[test] +fn excluded_simple() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [workspace] + exclude = ["foo"] + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(&p.root().join("target"), existing_dir()); + assert_that(p.cargo("build").cwd(p.root().join("foo")), + execs().with_status(0)); + assert_that(&p.root().join("foo/target"), existing_dir()); +} + +#[test] +fn exclude_members_preferred() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [workspace] + members = ["foo/bar"] + exclude = ["foo"] + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", "") + .file("foo/bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + 
.file("foo/bar/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(&p.root().join("target"), existing_dir()); + assert_that(p.cargo("build").cwd(p.root().join("foo")), + execs().with_status(0)); + assert_that(&p.root().join("foo/target"), existing_dir()); + assert_that(p.cargo("build").cwd(p.root().join("foo/bar")), + execs().with_status(0)); + assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir())); +} + +#[test] +fn exclude_but_also_depend() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "foo/bar" } + + [workspace] + exclude = ["foo"] + "#) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", "") + .file("foo/bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("foo/bar/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + assert_that(&p.root().join("target"), existing_dir()); + assert_that(p.cargo("build").cwd(p.root().join("foo")), + execs().with_status(0)); + assert_that(&p.root().join("foo/target"), existing_dir()); + assert_that(p.cargo("build").cwd(p.root().join("foo/bar")), + execs().with_status(0)); + assert_that(&p.root().join("foo/bar/target"), existing_dir()); +} + +#[test] +fn glob_syntax() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["crates/*"] + exclude = ["crates/qux"] + "#) + .file("src/main.rs", "fn main() {}") + .file("crates/bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../.." + "#) + .file("crates/bar/src/main.rs", "fn main() {}") + .file("crates/baz/Cargo.toml", r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + workspace = "../.." + "#) + .file("crates/baz/src/main.rs", "fn main() {}") + .file("crates/qux/Cargo.toml", r#" + [project] + name = "qux" + version = "0.1.0" + authors = [] + "#) + .file("crates/qux/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + assert_that(&p.bin("baz"), is_not(existing_file())); + + assert_that(p.cargo("build").cwd(p.root().join("crates/bar")), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + + assert_that(p.cargo("build").cwd(p.root().join("crates/baz")), + execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("baz"), existing_file()); + + assert_that(p.cargo("build").cwd(p.root().join("crates/qux")), + execs().with_status(0)); + assert_that(&p.bin("qux"), is_not(existing_file())); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.root().join("crates/bar/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("crates/baz/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("crates/qux/Cargo.lock"), existing_file()); +} + +/*FIXME: This fails because of how workspace.exclude and workspace.members are working. 
+#[test]
+fn glob_syntax_2() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/b*"]
+            exclude = ["crates/q*"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/bar/src/main.rs", "fn main() {}")
+        .file("crates/baz/Cargo.toml", r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/baz/src/main.rs", "fn main() {}")
+        .file("crates/qux/Cargo.toml", r#"
+            [project]
+            name = "qux"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("crates/qux/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), is_not(existing_file()));
+    assert_that(&p.bin("baz"), is_not(existing_file()));
+
+    assert_that(p.cargo("build").cwd(p.root().join("crates/bar")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+
+    assert_that(p.cargo("build").cwd(p.root().join("crates/baz")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("baz"), existing_file());
+
+    assert_that(p.cargo("build").cwd(p.root().join("crates/qux")),
+                execs().with_status(0));
+    assert_that(&p.bin("qux"), is_not(existing_file()));
+
+    assert_that(&p.root().join("Cargo.lock"), existing_file());
+    assert_that(&p.root().join("crates/bar/Cargo.lock"), is_not(existing_file()));
+    assert_that(&p.root().join("crates/baz/Cargo.lock"), is_not(existing_file()));
+    assert_that(&p.root().join("crates/qux/Cargo.lock"), existing_file());
+}
+*/
+
+#[test]
+fn glob_syntax_invalid_members() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/*"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..]
+"));
+}
+
+/// This is a freshness test for feature use with workspaces.
+///
+/// feat_lib is used by caller1 and caller2, but with different features enabled.
+/// This test ensures that alternately building caller1 and caller2 doesn't force
+/// a recompile of feat_lib.
+///
+/// Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then
+/// a single cargo build at the top level will be enough.
+#[test]
+fn dep_used_with_separate_features() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [workspace]
+            members = ["feat_lib", "caller1", "caller2"]
+        "#)
+        .file("feat_lib/Cargo.toml", r#"
+            [project]
+            name = "feat_lib"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            myfeature = []
+        "#)
+        .file("feat_lib/src/lib.rs", "")
+        .file("caller1/Cargo.toml", r#"
+            [project]
+            name = "caller1"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            feat_lib = { path = "../feat_lib" }
+        "#)
+        .file("caller1/src/main.rs", "fn main() {}")
+        .file("caller1/src/lib.rs", "")
+        .file("caller2/Cargo.toml", r#"
+            [project]
+            name = "caller2"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            feat_lib = { path = "../feat_lib", features = ["myfeature"] }
+            caller1 = { path = "../caller1" }
+        "#)
+        .file("caller2/src/main.rs", "fn main() {}")
+        .file("caller2/src/lib.rs", "");
+    let p = p.build();
+
+    // Build the entire workspace.
+    assert_that(p.cargo("build").arg("--all"),
+                execs().with_status(0)
+                       .with_stderr("\
+[..]Compiling feat_lib v0.1.0 ([..])
+[..]Compiling caller1 v0.1.0 ([..])
+[..]Compiling caller2 v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+"));
+    assert_that(&p.bin("caller1"), existing_file());
+    assert_that(&p.bin("caller2"), existing_file());
+
+
+    // Build caller1. This should build the dep library. Because the features
+    // are different from those of the full workspace, it rebuilds.
+    // Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then
+    // a single cargo build at the top level will be enough.
+    assert_that(p.cargo("build").cwd(p.root().join("caller1")),
+                execs().with_status(0)
+                       .with_stderr("\
+[..]Compiling feat_lib v0.1.0 ([..])
+[..]Compiling caller1 v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+"));
+
+    // Alternate building caller2/caller1 a few times, just to make sure
+    // features are being built separately. This should not rebuild anything.
+    assert_that(p.cargo("build").cwd(p.root().join("caller2")),
+                execs().with_status(0)
+                       .with_stderr("\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+"));
+    assert_that(p.cargo("build").cwd(p.root().join("caller1")),
+                execs().with_status(0)
+                       .with_stderr("\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+"));
+    assert_that(p.cargo("build").cwd(p.root().join("caller2")),
+                execs().with_status(0)
+                       .with_stderr("\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+"));
+}
+
+/*FIXME: This fails because of how workspace.exclude and workspace.members are working.
+#[test] +fn include_and_exclude() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["foo"] + exclude = ["foo/bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", "") + .file("foo/bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("foo/bar/src/lib.rs", ""); + p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("foo")), + execs().with_status(0)); + assert_that(&p.root().join("target"), existing_dir()); + assert_that(&p.root().join("foo/target"), is_not(existing_dir())); + assert_that(p.cargo("build").cwd(p.root().join("foo/bar")), + execs().with_status(0)); + assert_that(&p.root().join("foo/bar/target"), existing_dir()); +} +*/ diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/.gitignore b/collector/compile-benchmarks/cargo/url-1.5.1/.gitignore new file mode 100644 index 000000000..0284c25cc --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/.gitignore @@ -0,0 +1,3 @@ +target +Cargo.lock +/.cargo/config diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/.travis.yml b/collector/compile-benchmarks/cargo/url-1.5.1/.travis.yml new file mode 100644 index 000000000..6b1d0cafd --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/.travis.yml @@ -0,0 +1,9 @@ +language: rust +rust: + - nightly + - beta + - stable + - 1.17.0 +script: make test +notifications: + webhooks: http://build.servo.org:54856/travis diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/Cargo.toml b/collector/compile-benchmarks/cargo/url-1.5.1/Cargo.toml new file mode 100644 index 000000000..47cca02ee --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/Cargo.toml @@ -0,0 +1,49 @@ +[package] + +name = "url" +# When updating version, also modify html_root_url in the lib.rs +version = "1.5.1" +authors = ["The rust-url developers"] + +description = "URL library for Rust, based on the WHATWG URL Standard" +documentation = "https://docs.rs/url" +repository = "https://github.com/servo/rust-url" +readme = "README.md" +keywords = ["url", "parser"] +categories = ["parser-implementations", "web-programming", "encoding"] +license = "MIT/Apache-2.0" + +[badges] +travis-ci = { repository = "servo/rust-url" } +appveyor = { repository = "servo/rust-url" } + +#[workspace] +#members = [".", "idna", "percent_encoding", "url_serde"] + +[[test]] +name = "unit" + +[[test]] +name = "data" +harness = false + +[lib] +test = false + +[dev-dependencies] +rustc-test = "0.1" +rustc-serialize = "0.3" +serde_json = ">=0.6.1, <0.9" + +[features] +query_encoding = ["encoding"] +heap_size = ["heapsize"] + +[dependencies] +encoding = {version = "0.2", optional = true} +heapsize = {version = ">=0.1.1, <0.5", optional = true} +idna = { version = "0.1.0", path = "./idna" } +matches = "0.1" +percent-encoding = { version = "1.0.0", path = "./percent_encoding" } +rustc-serialize = {version = "0.3", optional = true} +serde = {version = ">=0.6.1, <0.9", optional = true} diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/LICENSE-APACHE b/collector/compile-benchmarks/cargo/url-1.5.1/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/LICENSE-MIT b/collector/compile-benchmarks/cargo/url-1.5.1/LICENSE-MIT new file mode 100644 index 000000000..24de6b418 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2013-2016 The rust-url developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/Makefile b/collector/compile-benchmarks/cargo/url-1.5.1/Makefile new file mode 100644 index 000000000..7a8dc270d --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/Makefile @@ -0,0 +1,6 @@ +test: + cargo test --features "query_encoding serde rustc-serialize heapsize" + (cd idna && cargo test) + (cd url_serde && cargo test) + +.PHONY: test diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/README.md b/collector/compile-benchmarks/cargo/url-1.5.1/README.md new file mode 100644 index 000000000..0721254af --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/README.md @@ -0,0 +1,10 @@ +rust-url +======== + +[![Travis build Status](https://travis-ci.org/servo/rust-url.svg?branch=master)](https://travis-ci.org/servo/rust-url) [![Appveyor build status](https://ci.appveyor.com/api/projects/status/ulkqx2xcemyod6xa?svg=true)](https://ci.appveyor.com/project/Manishearth/rust-url) + +URL library for Rust, based on the [URL Standard](https://url.spec.whatwg.org/). + +[Documentation](https://docs.rs/url/) + +Please see [UPGRADING.md](https://github.com/servo/rust-url/blob/master/UPGRADING.md) if you are upgrading from 0.x to 1.x. diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/UPGRADING.md b/collector/compile-benchmarks/cargo/url-1.5.1/UPGRADING.md new file mode 100644 index 000000000..f156130f6 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/UPGRADING.md @@ -0,0 +1,263 @@ +# Guide to upgrading from url 0.x to 1.x + +* The fields of `Url` are now private because the `Url` constructor, parser, + and setters maintain invariants that could be violated if you were to set the fields directly. + Instead of accessing, for example, `url.scheme`, use the getter method, such as `url.scheme()`. + Instead of assigning directly to a field, for example `url.scheme = "https".to_string()`, + use the setter method, such as `url.set_scheme("https").unwrap()`. + (Some setters validate the new value and return a `Result` that must be used). 
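+
+  For example (a minimal sketch for illustration; the URL below is arbitrary):
+
+  Before upgrading:
+
+  ```rust
+  // `example.net` is only a placeholder URL for this sketch.
+  let mut url = Url::parse("https://example.net/").unwrap();
+  assert_eq!(url.scheme, "https");
+  url.scheme = "http".to_string();
+  ```
+
+  After upgrading:
+
+  ```rust
+  let mut url = Url::parse("https://example.net/").unwrap();
+  assert_eq!(url.scheme(), "https");
+  url.set_scheme("http").unwrap();
+  ```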
+
+* The methods of `Url` now return `&str` instead of `String`,
+  thus reducing allocations and making serialization cheap.
+
+* The `path()` method on `url::Url` instances used to return `Option<&[String]>`;
+  now it returns `&str`.
+  If you would like functionality more similar to the old behavior of `path()`,
+  use `path_segments()` that returns `Option<str::Split<char>>`.
+
+  Before upgrading:
+
+  ```rust
+  let issue_list_url = Url::parse(
+      "https://github.com/rust-lang/rust/issues?labels=E-easy&state=open"
+  ).unwrap();
+  assert_eq!(issue_list_url.path(), Some(&["rust-lang".to_string(),
+                                           "rust".to_string(),
+                                           "issues".to_string()][..]));
+  ```
+
+  After upgrading:
+
+  ```rust
+  let issue_list_url = Url::parse(
+      "https://github.com/rust-lang/rust/issues?labels=E-easy&state=open"
+  ).unwrap();
+  assert_eq!(issue_list_url.path(), "/rust-lang/rust/issues");
+  assert_eq!(issue_list_url.path_segments().map(|c| c.collect::<Vec<_>>()),
+             Some(vec!["rust-lang", "rust", "issues"]));
+  ```
+
+* The `path_mut()` method on `url::Url` instances that allowed modification of a URL's path
+  has been replaced by `path_segments_mut()`.
+
+  Before upgrading:
+
+  ```rust
+  let mut url = Url::parse("https://github.com/rust-lang/rust").unwrap();
+  url.path_mut().unwrap().push("issues");
+  ```
+
+  After upgrading:
+
+  ```rust
+  let mut url = Url::parse("https://github.com/rust-lang/rust").unwrap();
+  url.path_segments_mut().unwrap().push("issues");
+  ```
+
+* The `domain_mut()` method on `url::Url` instances that allowed modification of a URL's domain
+  has been replaced by `set_host()` and `set_ip_host()`.
+
+* The `host()` method on `url::Url` instances used to return `Option<&Host>`;
+  now it returns `Option<Host<&str>>`.
+  The `serialize_host()` method that returned `Option<String>`
+  has been replaced by the `host_str()` method that returns `Option<&str>`.
+
+* The `serialize()` method on `url::Url` instances that returned `String`
+  has been replaced by an `as_str()` method that returns `&str`.
+
+  Before upgrading:
+
+  ```rust
+  let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html").unwrap();
+  assert_eq!(this_document.serialize(), "http://servo.github.io/rust-url/url/index.html".to_string());
+  ```
+
+  After upgrading:
+
+  ```rust
+  let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html").unwrap();
+  assert_eq!(this_document.as_str(), "http://servo.github.io/rust-url/url/index.html");
+  ```
+
+* `url::UrlParser` has been replaced by `url::Url::parse()` and `url::Url::join()`.
+
+  Before upgrading:
+
+  ```rust
+  let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html").unwrap();
+  let css_url = UrlParser::new().base_url(&this_document).parse("../main.css").unwrap();
+  assert_eq!(css_url.serialize(), "http://servo.github.io/rust-url/main.css".to_string());
+  ```
+
+  After upgrading:
+
+  ```rust
+  let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html").unwrap();
+  let css_url = this_document.join("../main.css").unwrap();
+  assert_eq!(css_url.as_str(), "http://servo.github.io/rust-url/main.css");
+  ```
+
+* `url::parse_path()` and `url::UrlParser::parse_path()` have been removed without replacement.
+  As a workaround, you can give `url::Url::parse()` a base URL that you then ignore.
+
+  Before upgrading:
+
+  ```rust
+  let (path, query, fragment) = url::parse_path("/foo/bar/../baz?q=42").unwrap();
+  assert_eq!(path, vec!["foo".to_string(), "baz".to_string()]);
+  assert_eq!(query, Some("q=42".to_string()));
+  assert_eq!(fragment, None);
+  ```
+
+  After upgrading:
+
+  ```rust
+  let base = Url::parse("http://example.com").unwrap();
+  let with_path = base.join("/foo/bar/../baz?q=42").unwrap();
+  assert_eq!(with_path.path(), "/foo/baz");
+  assert_eq!(with_path.query(), Some("q=42"));
+  assert_eq!(with_path.fragment(), None);
+  ```
+
+* The `url::form_urlencoded::serialize()` method
+  has been replaced with the `url::form_urlencoded::Serializer` struct.
+  Instead of calling `serialize()` with key/value pairs,
+  create a new `Serializer` with a new string,
+  call the `extend_pairs()` method on the `Serializer` instance with the key/value pairs as the argument,
+  then call `finish()`.
+
+  Before upgrading:
+
+  ```rust
+  let form = url::form_urlencoded::serialize(form.iter().map(|(k, v)| {
+      (&k[..], &v[..])
+  }));
+  ```
+
+  After upgrading:
+
+  ```rust
+  let form = url::form_urlencoded::Serializer::new(String::new()).extend_pairs(
+      form.iter().map(|(k, v)| { (&k[..], &v[..]) })
+  ).finish();
+  ```
+
+* The `set_query_from_pairs()` method on `url::Url` instances that took key/value pairs
+  has been replaced with `query_pairs_mut()`, which allows you to modify the `url::Url`'s query pairs.
+
+  Before upgrading:
+
+  ```rust
+  let mut url = Url::parse("https://duckduckgo.com/").unwrap();
+  let pairs = vec![
+      ("q", "test"),
+      ("ia", "images"),
+  ];
+  url.set_query_from_pairs(pairs.iter().map(|&(k, v)| {
+      (&k[..], &v[..])
+  }));
+  ```
+
+  After upgrading:
+
+  ```rust
+  let mut url = Url::parse("https://duckduckgo.com/").unwrap();
+  let pairs = vec![
+      ("q", "test"),
+      ("ia", "images"),
+  ];
+  url.query_pairs_mut().clear().extend_pairs(
+      pairs.iter().map(|&(k, v)| { (&k[..], &v[..]) })
+  );
+  ```
+
+* `url::SchemeData`, its variants `Relative` and `NonRelative`,
+  and the struct `url::RelativeSchemeData` have been removed.
+  Instead of matching on these variants
+  to determine if you have a URL in a relative scheme such as HTTP
+  versus a URL in a non-relative scheme such as data,
+  use the `cannot_be_a_base()` method to determine which kind you have.
+
+  Before upgrading:
+
+  ```rust
+  match url.scheme_data {
+      url::SchemeData::Relative(..) => {}
+      url::SchemeData::NonRelative(..) => {
+          return Err(human(format!("`{}` must have relative scheme \
+                                    data: {}", field, url)))
+      }
+  }
+  ```
+
+  After upgrading:
+
+  ```rust
+  if url.cannot_be_a_base() {
+      return Err(human(format!("`{}` must have relative scheme \
+                                data: {}", field, url)))
+  }
+  ```
+
+* The function `url::whatwg_scheme_type_mapper()`, the `SchemeType` enum,
+  and the `scheme_type_mapper()` method on `url::UrlParser` instances have been removed.
+  `SchemeType` had a method for getting the `default_port()`;
+  to replicate this functionality, use the method `port_or_known_default()` on `url::Url` instances.
+  The `port_or_default()` method on `url::Url` instances has been removed;
+  use `port_or_known_default()` instead.
+
+  Before upgrading:
+
+  ```rust
+  let port = match whatwg_scheme_type_mapper(&url.scheme) {
+      SchemeType::Relative(port) => port,
+      _ => return Err(format!("Invalid special scheme: `{}`",
+                              raw_url.scheme)),
+  };
+  ```
+
+  After upgrading:
+
+  ```rust
+  let port = match url.port_or_known_default() {
+      Some(port) => port,
+      _ => return Err(format!("Invalid special scheme: `{}`",
+                              url.scheme())),
+  };
+  ```
+
+* The following formatting utilities have been removed without replacement;
+  look at their linked previous implementations
+  if you would like to replicate the functionality in your code:
+  * [`url::format::PathFormatter`](https://github.com/servo/rust-url/pull/176/commits/9e759f18726c8e1343162922b87163d4dd08fe3c#diff-0bb16ac13b75e9b568fa4aff61b0e71dL24)
+  * [`url::format::UserInfoFormatter`](https://github.com/servo/rust-url/pull/176/commits/9e759f18726c8e1343162922b87163d4dd08fe3c#diff-0bb16ac13b75e9b568fa4aff61b0e71dL50)
+  * [`url::format::UrlNoFragmentFormatter`](https://github.com/servo/rust-url/pull/176/commits/9e759f18726c8e1343162922b87163d4dd08fe3c#diff-0bb16ac13b75e9b568fa4aff61b0e71dL70)
+
+* `url::percent_encoding::percent_decode()` used to have a return type of `Vec<u8>`;
+  now it returns an iterator of decoded `u8` bytes that also implements `Into<Cow<[u8]>>`.
+  Use `.into().to_owned()` to obtain a `Vec<u8>`.
+  (`.collect()` also works but might not be as efficient.)
+  A short worked example appears at the end of this guide.
+
+* The `url::percent_encoding::EncodeSet` struct and constant instances
+  used with `url::percent_encoding::percent_encode()`
+  have been changed to structs that implement the trait `url::percent_encoding::EncodeSet`.
+  * `SIMPLE_ENCODE_SET`, `QUERY_ENCODE_SET`, `DEFAULT_ENCODE_SET`,
+    and `USERINFO_ENCODE_SET` have the same behavior.
+  * `USERNAME_ENCODE_SET` and `PASSWORD_ENCODE_SET` have been removed;
+    use `USERINFO_ENCODE_SET` instead.
+  * `HTTP_VALUE_ENCODE_SET` has been removed;
+    an implementation of it in the new types can be found [in hyper's source](
+    https://github.com/hyperium/hyper/blob/67436c5bf615cf5a55a71e32b788afef5985570e/src/header/parsing.rs#L131-L138)
+    if you need to replicate this functionality in your code.
+  * `FORM_URLENCODED_ENCODE_SET` has been removed;
+    instead, use the functionality in `url::form_urlencoded`.
+  * `PATH_SEGMENT_ENCODE_SET` has been added for use on '/'-separated path segments.
+
+* `url::percent_encoding::percent_decode_to()` has been removed.
+  Use `url::percent_encoding::percent_decode()` which returns an iterator.
+  You can then use the iterator’s `collect()` method
+  or give it to some data structure’s `extend()` method.
+* A number of `ParseError` variants have changed.
+  [See the documentation for the current set](http://servo.github.io/rust-url/url/enum.ParseError.html).
+* `url::OpaqueOrigin::new()` and `url::Origin::UID(OpaqueOrigin)`
+  have been replaced by `url::Origin::new_opaque()` and `url::Origin::Opaque(OpaqueOrigin)`, respectively.
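+
+For example, the percent-decoding changes described above fit together like this
+(a minimal sketch; the input bytes are arbitrary):
+
+```rust
+use std::borrow::Cow;
+use url::percent_encoding::percent_decode;
+
+// `percent_decode()` now returns an iterator of decoded bytes...
+let decoded: Vec<u8> = percent_decode(b"foo%20bar").collect();
+assert_eq!(decoded, b"foo bar".to_vec());
+
+// ...which can also be converted into a `Cow<[u8]>`, and from there
+// into a `Vec<u8>` when an owned copy is needed.
+let cow: Cow<[u8]> = percent_decode(b"foo%20bar").into();
+assert_eq!(cow.into_owned(), b"foo bar".to_vec());
+```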
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/appveyor.yml b/collector/compile-benchmarks/cargo/url-1.5.1/appveyor.yml new file mode 100644 index 000000000..5819d38b1 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/appveyor.yml @@ -0,0 +1,13 @@ +install: + - ps: Start-FileDownload 'https://static.rust-lang.org/dist/rust-nightly-i686-pc-windows-gnu.exe' + - rust-nightly-i686-pc-windows-gnu.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" + - SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin + - rustc -V + - cargo -V + - git submodule update --init --recursive + +build: false + +test_script: + - cargo build + - cargo test --verbose diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/docs/.nojekyll b/collector/compile-benchmarks/cargo/url-1.5.1/docs/.nojekyll new file mode 100644 index 000000000..e69de29bb diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/docs/404.html b/collector/compile-benchmarks/cargo/url-1.5.1/docs/404.html new file mode 100644 index 000000000..b13eac0ee --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/docs/404.html @@ -0,0 +1,3 @@ + + +Moved to docs.rs diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/docs/index.html b/collector/compile-benchmarks/cargo/url-1.5.1/docs/index.html new file mode 100644 index 000000000..b13eac0ee --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/docs/index.html @@ -0,0 +1,3 @@ + + +Moved to docs.rs diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/.gitignore b/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/.gitignore new file mode 100644 index 000000000..572e03bdf --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/.gitignore @@ -0,0 +1,4 @@ + +target +corpus +artifacts diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/Cargo.toml b/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/Cargo.toml new file mode 100644 index 000000000..0b108b955 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/Cargo.toml @@ -0,0 +1,21 @@ + +[package] +name = "url-fuzz" +version = "0.0.1" +authors = ["Automatically generated"] +publish = false + +[package.metadata] +cargo-fuzz = true + +[dependencies.url] +path = ".." 
+[dependencies.libfuzzer-sys] +git = "https://github.com/rust-fuzz/libfuzzer-sys.git" + +[[bin]] +name = "parse" +path = "fuzzers/parse.rs" + +[workspace] +members = ["."] diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/fuzzers/parse.rs b/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/fuzzers/parse.rs new file mode 100644 index 000000000..ef8c53f47 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/fuzz/fuzzers/parse.rs @@ -0,0 +1,10 @@ +#![no_main] +#[macro_use] extern crate libfuzzer_sys; +extern crate url; +use std::str; + +fuzz_target!(|data: &[u8]| { + if let Ok(utf8) = str::from_utf8(data) { + let _ = url::Url::parse(utf8); + } +}); diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/github.png b/collector/compile-benchmarks/cargo/url-1.5.1/github.png new file mode 100644 index 000000000..b2c327097 Binary files /dev/null and b/collector/compile-benchmarks/cargo/url-1.5.1/github.png differ diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/Cargo.toml b/collector/compile-benchmarks/cargo/url-1.5.1/idna/Cargo.toml new file mode 100644 index 000000000..8a1924a3b --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "idna" +version = "0.1.3" +authors = ["The rust-url developers"] +description = "IDNA (Internationalizing Domain Names in Applications) and Punycode." +repository = "https://github.com/servo/rust-url/" +license = "MIT/Apache-2.0" + +[lib] +doctest = false +test = false + +[[test]] +name = "tests" +harness = false + +[[test]] +name = "unit" + +[dev-dependencies] +rustc-test = "0.1" +rustc-serialize = "0.3" + +[dependencies] +unicode-bidi = "0.3" +unicode-normalization = "0.1.5" +matches = "0.1" diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/IdnaMappingTable.txt b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/IdnaMappingTable.txt new file mode 100644 index 000000000..0a9c90e16 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/IdnaMappingTable.txt @@ -0,0 +1,8350 @@ +# IdnaMappingTable-9.0.0.txt +# Date: 2016-06-16, 13:35:01 GMT +# © 2016 Unicode®, Inc. +# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. 
+# For terms of use, see http://www.unicode.org/terms_of_use.html
+#
+# Unicode IDNA Compatible Preprocessing (UTS #46)
+# For documentation, see http://www.unicode.org/reports/tr46/
+
+0000..002C ; disallowed_STD3_valid # 1.1 <control-0000>..COMMA
+002D..002E ; valid # 1.1 HYPHEN-MINUS..FULL STOP
+002F ; disallowed_STD3_valid # 1.1 SOLIDUS
+0030..0039 ; valid # 1.1 DIGIT ZERO..DIGIT NINE
+003A..0040 ; disallowed_STD3_valid # 1.1 COLON..COMMERCIAL AT
+0041 ; mapped ; 0061 # 1.1 LATIN CAPITAL LETTER A
+0042 ; mapped ; 0062 # 1.1 LATIN CAPITAL LETTER B
+0043 ; mapped ; 0063 # 1.1 LATIN CAPITAL LETTER C
+0044 ; mapped ; 0064 # 1.1 LATIN CAPITAL LETTER D
+0045 ; mapped ; 0065 # 1.1 LATIN CAPITAL LETTER E
+0046 ; mapped ; 0066 # 1.1 LATIN CAPITAL LETTER F
+0047 ; mapped ; 0067 # 1.1 LATIN CAPITAL LETTER G
+0048 ; mapped ; 0068 # 1.1 LATIN CAPITAL LETTER H
+0049 ; mapped ; 0069 # 1.1 LATIN CAPITAL LETTER I
+004A ; mapped ; 006A # 1.1 LATIN CAPITAL LETTER J
+004B ; mapped ; 006B # 1.1 LATIN CAPITAL LETTER K
+004C ; mapped ; 006C # 1.1 LATIN CAPITAL LETTER L
+004D ; mapped ; 006D # 1.1 LATIN CAPITAL LETTER M
+004E ; mapped ; 006E # 1.1 LATIN CAPITAL LETTER N
+004F ; mapped ; 006F # 1.1 LATIN CAPITAL LETTER O
+0050 ; mapped ; 0070 # 1.1 LATIN CAPITAL LETTER P
+0051 ; mapped ; 0071 # 1.1 LATIN CAPITAL LETTER Q
+0052 ; mapped ; 0072 # 1.1 LATIN CAPITAL LETTER R
+0053 ; mapped ; 0073 # 1.1 LATIN CAPITAL LETTER S
+0054 ; mapped ; 0074 # 1.1 LATIN CAPITAL LETTER T
+0055 ; mapped ; 0075 # 1.1 LATIN CAPITAL LETTER U
+0056 ; mapped ; 0076 # 1.1 LATIN CAPITAL LETTER V
+0057 ; mapped ; 0077 # 1.1 LATIN CAPITAL LETTER W
+0058 ; mapped ; 0078 # 1.1 LATIN CAPITAL LETTER X
+0059 ; mapped ; 0079 # 1.1 LATIN CAPITAL LETTER Y
+005A ; mapped ; 007A # 1.1 LATIN CAPITAL LETTER Z
+005B..0060 ; disallowed_STD3_valid # 1.1 LEFT SQUARE BRACKET..GRAVE ACCENT
+0061..007A ; valid # 1.1 LATIN SMALL LETTER A..LATIN SMALL LETTER Z
+007B..007F ; disallowed_STD3_valid # 1.1 LEFT CURLY BRACKET..<control-007F>
+0080..009F ; disallowed # 1.1 <control-0080>..<control-009F>
+00A0 ; disallowed_STD3_mapped ; 0020 # 1.1 NO-BREAK SPACE +00A1..00A7 ; valid ; ; NV8 # 1.1 INVERTED EXCLAMATION MARK..SECTION SIGN +00A8 ; disallowed_STD3_mapped ; 0020 0308 # 1.1 DIAERESIS +00A9 ; valid ; ; NV8 # 1.1 COPYRIGHT SIGN +00AA ; mapped ; 0061 # 1.1 FEMININE ORDINAL INDICATOR +00AB..00AC ; valid ; ; NV8 # 1.1 LEFT-POINTING DOUBLE ANGLE QUOTATION MARK..NOT SIGN +00AD ; ignored # 1.1 SOFT HYPHEN +00AE ; valid ; ; NV8 # 1.1 REGISTERED SIGN +00AF ; disallowed_STD3_mapped ; 0020 0304 # 1.1 MACRON +00B0..00B1 ; valid ; ; NV8 # 1.1 DEGREE SIGN..PLUS-MINUS SIGN +00B2 ; mapped ; 0032 # 1.1 SUPERSCRIPT TWO +00B3 ; mapped ; 0033 # 1.1 SUPERSCRIPT THREE +00B4 ; disallowed_STD3_mapped ; 0020 0301 # 1.1 ACUTE ACCENT +00B5 ; mapped ; 03BC # 1.1 MICRO SIGN +00B6 ; valid ; ; NV8 # 1.1 PILCROW SIGN +00B7 ; valid # 1.1 MIDDLE DOT +00B8 ; disallowed_STD3_mapped ; 0020 0327 # 1.1 CEDILLA +00B9 ; mapped ; 0031 # 1.1 SUPERSCRIPT ONE +00BA ; mapped ; 006F # 1.1 MASCULINE ORDINAL INDICATOR +00BB ; valid ; ; NV8 # 1.1 RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK +00BC ; mapped ; 0031 2044 0034 #1.1 VULGAR FRACTION ONE QUARTER +00BD ; mapped ; 0031 2044 0032 #1.1 VULGAR FRACTION ONE HALF +00BE ; mapped ; 0033 2044 0034 #1.1 VULGAR FRACTION THREE QUARTERS +00BF ; valid ; ; NV8 # 1.1 INVERTED QUESTION MARK +00C0 ; mapped ; 00E0 # 1.1 LATIN CAPITAL LETTER A WITH GRAVE +00C1 ; mapped ; 00E1 # 1.1 LATIN CAPITAL LETTER A WITH ACUTE +00C2 ; mapped ; 00E2 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX +00C3 ; mapped ; 00E3 # 1.1 LATIN CAPITAL LETTER A WITH TILDE +00C4 ; mapped ; 00E4 # 1.1 LATIN CAPITAL LETTER A WITH DIAERESIS +00C5 ; mapped ; 00E5 # 1.1 LATIN CAPITAL LETTER A WITH RING ABOVE +00C6 ; mapped ; 00E6 # 1.1 LATIN CAPITAL LETTER AE +00C7 ; mapped ; 00E7 # 1.1 LATIN CAPITAL LETTER C WITH CEDILLA +00C8 ; mapped ; 00E8 # 1.1 LATIN CAPITAL LETTER E WITH GRAVE +00C9 ; mapped ; 00E9 # 1.1 LATIN CAPITAL LETTER E WITH ACUTE +00CA ; mapped ; 00EA # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX +00CB ; mapped ; 00EB # 1.1 LATIN CAPITAL LETTER E WITH DIAERESIS +00CC ; mapped ; 00EC # 1.1 LATIN CAPITAL LETTER I WITH GRAVE +00CD ; mapped ; 00ED # 1.1 LATIN CAPITAL LETTER I WITH ACUTE +00CE ; mapped ; 00EE # 1.1 LATIN CAPITAL LETTER I WITH CIRCUMFLEX +00CF ; mapped ; 00EF # 1.1 LATIN CAPITAL LETTER I WITH DIAERESIS +00D0 ; mapped ; 00F0 # 1.1 LATIN CAPITAL LETTER ETH +00D1 ; mapped ; 00F1 # 1.1 LATIN CAPITAL LETTER N WITH TILDE +00D2 ; mapped ; 00F2 # 1.1 LATIN CAPITAL LETTER O WITH GRAVE +00D3 ; mapped ; 00F3 # 1.1 LATIN CAPITAL LETTER O WITH ACUTE +00D4 ; mapped ; 00F4 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX +00D5 ; mapped ; 00F5 # 1.1 LATIN CAPITAL LETTER O WITH TILDE +00D6 ; mapped ; 00F6 # 1.1 LATIN CAPITAL LETTER O WITH DIAERESIS +00D7 ; valid ; ; NV8 # 1.1 MULTIPLICATION SIGN +00D8 ; mapped ; 00F8 # 1.1 LATIN CAPITAL LETTER O WITH STROKE +00D9 ; mapped ; 00F9 # 1.1 LATIN CAPITAL LETTER U WITH GRAVE +00DA ; mapped ; 00FA # 1.1 LATIN CAPITAL LETTER U WITH ACUTE +00DB ; mapped ; 00FB # 1.1 LATIN CAPITAL LETTER U WITH CIRCUMFLEX +00DC ; mapped ; 00FC # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS +00DD ; mapped ; 00FD # 1.1 LATIN CAPITAL LETTER Y WITH ACUTE +00DE ; mapped ; 00FE # 1.1 LATIN CAPITAL LETTER THORN +00DF ; deviation ; 0073 0073 # 1.1 LATIN SMALL LETTER SHARP S +00E0..00F6 ; valid # 1.1 LATIN SMALL LETTER A WITH GRAVE..LATIN SMALL LETTER O WITH DIAERESIS +00F7 ; valid ; ; NV8 # 1.1 DIVISION SIGN +00F8..00FF ; valid # 1.1 LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER Y WITH DIAERESIS +0100 ; 
mapped ; 0101 # 1.1 LATIN CAPITAL LETTER A WITH MACRON +0101 ; valid # 1.1 LATIN SMALL LETTER A WITH MACRON +0102 ; mapped ; 0103 # 1.1 LATIN CAPITAL LETTER A WITH BREVE +0103 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE +0104 ; mapped ; 0105 # 1.1 LATIN CAPITAL LETTER A WITH OGONEK +0105 ; valid # 1.1 LATIN SMALL LETTER A WITH OGONEK +0106 ; mapped ; 0107 # 1.1 LATIN CAPITAL LETTER C WITH ACUTE +0107 ; valid # 1.1 LATIN SMALL LETTER C WITH ACUTE +0108 ; mapped ; 0109 # 1.1 LATIN CAPITAL LETTER C WITH CIRCUMFLEX +0109 ; valid # 1.1 LATIN SMALL LETTER C WITH CIRCUMFLEX +010A ; mapped ; 010B # 1.1 LATIN CAPITAL LETTER C WITH DOT ABOVE +010B ; valid # 1.1 LATIN SMALL LETTER C WITH DOT ABOVE +010C ; mapped ; 010D # 1.1 LATIN CAPITAL LETTER C WITH CARON +010D ; valid # 1.1 LATIN SMALL LETTER C WITH CARON +010E ; mapped ; 010F # 1.1 LATIN CAPITAL LETTER D WITH CARON +010F ; valid # 1.1 LATIN SMALL LETTER D WITH CARON +0110 ; mapped ; 0111 # 1.1 LATIN CAPITAL LETTER D WITH STROKE +0111 ; valid # 1.1 LATIN SMALL LETTER D WITH STROKE +0112 ; mapped ; 0113 # 1.1 LATIN CAPITAL LETTER E WITH MACRON +0113 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON +0114 ; mapped ; 0115 # 1.1 LATIN CAPITAL LETTER E WITH BREVE +0115 ; valid # 1.1 LATIN SMALL LETTER E WITH BREVE +0116 ; mapped ; 0117 # 1.1 LATIN CAPITAL LETTER E WITH DOT ABOVE +0117 ; valid # 1.1 LATIN SMALL LETTER E WITH DOT ABOVE +0118 ; mapped ; 0119 # 1.1 LATIN CAPITAL LETTER E WITH OGONEK +0119 ; valid # 1.1 LATIN SMALL LETTER E WITH OGONEK +011A ; mapped ; 011B # 1.1 LATIN CAPITAL LETTER E WITH CARON +011B ; valid # 1.1 LATIN SMALL LETTER E WITH CARON +011C ; mapped ; 011D # 1.1 LATIN CAPITAL LETTER G WITH CIRCUMFLEX +011D ; valid # 1.1 LATIN SMALL LETTER G WITH CIRCUMFLEX +011E ; mapped ; 011F # 1.1 LATIN CAPITAL LETTER G WITH BREVE +011F ; valid # 1.1 LATIN SMALL LETTER G WITH BREVE +0120 ; mapped ; 0121 # 1.1 LATIN CAPITAL LETTER G WITH DOT ABOVE +0121 ; valid # 1.1 LATIN SMALL LETTER G WITH DOT ABOVE +0122 ; mapped ; 0123 # 1.1 LATIN CAPITAL LETTER G WITH CEDILLA +0123 ; valid # 1.1 LATIN SMALL LETTER G WITH CEDILLA +0124 ; mapped ; 0125 # 1.1 LATIN CAPITAL LETTER H WITH CIRCUMFLEX +0125 ; valid # 1.1 LATIN SMALL LETTER H WITH CIRCUMFLEX +0126 ; mapped ; 0127 # 1.1 LATIN CAPITAL LETTER H WITH STROKE +0127 ; valid # 1.1 LATIN SMALL LETTER H WITH STROKE +0128 ; mapped ; 0129 # 1.1 LATIN CAPITAL LETTER I WITH TILDE +0129 ; valid # 1.1 LATIN SMALL LETTER I WITH TILDE +012A ; mapped ; 012B # 1.1 LATIN CAPITAL LETTER I WITH MACRON +012B ; valid # 1.1 LATIN SMALL LETTER I WITH MACRON +012C ; mapped ; 012D # 1.1 LATIN CAPITAL LETTER I WITH BREVE +012D ; valid # 1.1 LATIN SMALL LETTER I WITH BREVE +012E ; mapped ; 012F # 1.1 LATIN CAPITAL LETTER I WITH OGONEK +012F ; valid # 1.1 LATIN SMALL LETTER I WITH OGONEK +0130 ; mapped ; 0069 0307 # 1.1 LATIN CAPITAL LETTER I WITH DOT ABOVE +0131 ; valid # 1.1 LATIN SMALL LETTER DOTLESS I +0132..0133 ; mapped ; 0069 006A # 1.1 LATIN CAPITAL LIGATURE IJ..LATIN SMALL LIGATURE IJ +0134 ; mapped ; 0135 # 1.1 LATIN CAPITAL LETTER J WITH CIRCUMFLEX +0135 ; valid # 1.1 LATIN SMALL LETTER J WITH CIRCUMFLEX +0136 ; mapped ; 0137 # 1.1 LATIN CAPITAL LETTER K WITH CEDILLA +0137..0138 ; valid # 1.1 LATIN SMALL LETTER K WITH CEDILLA..LATIN SMALL LETTER KRA +0139 ; mapped ; 013A # 1.1 LATIN CAPITAL LETTER L WITH ACUTE +013A ; valid # 1.1 LATIN SMALL LETTER L WITH ACUTE +013B ; mapped ; 013C # 1.1 LATIN CAPITAL LETTER L WITH CEDILLA +013C ; valid # 1.1 LATIN SMALL LETTER L WITH CEDILLA +013D ; mapped ; 013E # 1.1 LATIN 
CAPITAL LETTER L WITH CARON +013E ; valid # 1.1 LATIN SMALL LETTER L WITH CARON +013F..0140 ; mapped ; 006C 00B7 # 1.1 LATIN CAPITAL LETTER L WITH MIDDLE DOT..LATIN SMALL LETTER L WITH MIDDLE DOT +0141 ; mapped ; 0142 # 1.1 LATIN CAPITAL LETTER L WITH STROKE +0142 ; valid # 1.1 LATIN SMALL LETTER L WITH STROKE +0143 ; mapped ; 0144 # 1.1 LATIN CAPITAL LETTER N WITH ACUTE +0144 ; valid # 1.1 LATIN SMALL LETTER N WITH ACUTE +0145 ; mapped ; 0146 # 1.1 LATIN CAPITAL LETTER N WITH CEDILLA +0146 ; valid # 1.1 LATIN SMALL LETTER N WITH CEDILLA +0147 ; mapped ; 0148 # 1.1 LATIN CAPITAL LETTER N WITH CARON +0148 ; valid # 1.1 LATIN SMALL LETTER N WITH CARON +0149 ; mapped ; 02BC 006E # 1.1 LATIN SMALL LETTER N PRECEDED BY APOSTROPHE +014A ; mapped ; 014B # 1.1 LATIN CAPITAL LETTER ENG +014B ; valid # 1.1 LATIN SMALL LETTER ENG +014C ; mapped ; 014D # 1.1 LATIN CAPITAL LETTER O WITH MACRON +014D ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON +014E ; mapped ; 014F # 1.1 LATIN CAPITAL LETTER O WITH BREVE +014F ; valid # 1.1 LATIN SMALL LETTER O WITH BREVE +0150 ; mapped ; 0151 # 1.1 LATIN CAPITAL LETTER O WITH DOUBLE ACUTE +0151 ; valid # 1.1 LATIN SMALL LETTER O WITH DOUBLE ACUTE +0152 ; mapped ; 0153 # 1.1 LATIN CAPITAL LIGATURE OE +0153 ; valid # 1.1 LATIN SMALL LIGATURE OE +0154 ; mapped ; 0155 # 1.1 LATIN CAPITAL LETTER R WITH ACUTE +0155 ; valid # 1.1 LATIN SMALL LETTER R WITH ACUTE +0156 ; mapped ; 0157 # 1.1 LATIN CAPITAL LETTER R WITH CEDILLA +0157 ; valid # 1.1 LATIN SMALL LETTER R WITH CEDILLA +0158 ; mapped ; 0159 # 1.1 LATIN CAPITAL LETTER R WITH CARON +0159 ; valid # 1.1 LATIN SMALL LETTER R WITH CARON +015A ; mapped ; 015B # 1.1 LATIN CAPITAL LETTER S WITH ACUTE +015B ; valid # 1.1 LATIN SMALL LETTER S WITH ACUTE +015C ; mapped ; 015D # 1.1 LATIN CAPITAL LETTER S WITH CIRCUMFLEX +015D ; valid # 1.1 LATIN SMALL LETTER S WITH CIRCUMFLEX +015E ; mapped ; 015F # 1.1 LATIN CAPITAL LETTER S WITH CEDILLA +015F ; valid # 1.1 LATIN SMALL LETTER S WITH CEDILLA +0160 ; mapped ; 0161 # 1.1 LATIN CAPITAL LETTER S WITH CARON +0161 ; valid # 1.1 LATIN SMALL LETTER S WITH CARON +0162 ; mapped ; 0163 # 1.1 LATIN CAPITAL LETTER T WITH CEDILLA +0163 ; valid # 1.1 LATIN SMALL LETTER T WITH CEDILLA +0164 ; mapped ; 0165 # 1.1 LATIN CAPITAL LETTER T WITH CARON +0165 ; valid # 1.1 LATIN SMALL LETTER T WITH CARON +0166 ; mapped ; 0167 # 1.1 LATIN CAPITAL LETTER T WITH STROKE +0167 ; valid # 1.1 LATIN SMALL LETTER T WITH STROKE +0168 ; mapped ; 0169 # 1.1 LATIN CAPITAL LETTER U WITH TILDE +0169 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE +016A ; mapped ; 016B # 1.1 LATIN CAPITAL LETTER U WITH MACRON +016B ; valid # 1.1 LATIN SMALL LETTER U WITH MACRON +016C ; mapped ; 016D # 1.1 LATIN CAPITAL LETTER U WITH BREVE +016D ; valid # 1.1 LATIN SMALL LETTER U WITH BREVE +016E ; mapped ; 016F # 1.1 LATIN CAPITAL LETTER U WITH RING ABOVE +016F ; valid # 1.1 LATIN SMALL LETTER U WITH RING ABOVE +0170 ; mapped ; 0171 # 1.1 LATIN CAPITAL LETTER U WITH DOUBLE ACUTE +0171 ; valid # 1.1 LATIN SMALL LETTER U WITH DOUBLE ACUTE +0172 ; mapped ; 0173 # 1.1 LATIN CAPITAL LETTER U WITH OGONEK +0173 ; valid # 1.1 LATIN SMALL LETTER U WITH OGONEK +0174 ; mapped ; 0175 # 1.1 LATIN CAPITAL LETTER W WITH CIRCUMFLEX +0175 ; valid # 1.1 LATIN SMALL LETTER W WITH CIRCUMFLEX +0176 ; mapped ; 0177 # 1.1 LATIN CAPITAL LETTER Y WITH CIRCUMFLEX +0177 ; valid # 1.1 LATIN SMALL LETTER Y WITH CIRCUMFLEX +0178 ; mapped ; 00FF # 1.1 LATIN CAPITAL LETTER Y WITH DIAERESIS +0179 ; mapped ; 017A # 1.1 LATIN CAPITAL LETTER Z WITH ACUTE +017A ; valid # 
1.1 LATIN SMALL LETTER Z WITH ACUTE +017B ; mapped ; 017C # 1.1 LATIN CAPITAL LETTER Z WITH DOT ABOVE +017C ; valid # 1.1 LATIN SMALL LETTER Z WITH DOT ABOVE +017D ; mapped ; 017E # 1.1 LATIN CAPITAL LETTER Z WITH CARON +017E ; valid # 1.1 LATIN SMALL LETTER Z WITH CARON +017F ; mapped ; 0073 # 1.1 LATIN SMALL LETTER LONG S +0180 ; valid # 1.1 LATIN SMALL LETTER B WITH STROKE +0181 ; mapped ; 0253 # 1.1 LATIN CAPITAL LETTER B WITH HOOK +0182 ; mapped ; 0183 # 1.1 LATIN CAPITAL LETTER B WITH TOPBAR +0183 ; valid # 1.1 LATIN SMALL LETTER B WITH TOPBAR +0184 ; mapped ; 0185 # 1.1 LATIN CAPITAL LETTER TONE SIX +0185 ; valid # 1.1 LATIN SMALL LETTER TONE SIX +0186 ; mapped ; 0254 # 1.1 LATIN CAPITAL LETTER OPEN O +0187 ; mapped ; 0188 # 1.1 LATIN CAPITAL LETTER C WITH HOOK +0188 ; valid # 1.1 LATIN SMALL LETTER C WITH HOOK +0189 ; mapped ; 0256 # 1.1 LATIN CAPITAL LETTER AFRICAN D +018A ; mapped ; 0257 # 1.1 LATIN CAPITAL LETTER D WITH HOOK +018B ; mapped ; 018C # 1.1 LATIN CAPITAL LETTER D WITH TOPBAR +018C..018D ; valid # 1.1 LATIN SMALL LETTER D WITH TOPBAR..LATIN SMALL LETTER TURNED DELTA +018E ; mapped ; 01DD # 1.1 LATIN CAPITAL LETTER REVERSED E +018F ; mapped ; 0259 # 1.1 LATIN CAPITAL LETTER SCHWA +0190 ; mapped ; 025B # 1.1 LATIN CAPITAL LETTER OPEN E +0191 ; mapped ; 0192 # 1.1 LATIN CAPITAL LETTER F WITH HOOK +0192 ; valid # 1.1 LATIN SMALL LETTER F WITH HOOK +0193 ; mapped ; 0260 # 1.1 LATIN CAPITAL LETTER G WITH HOOK +0194 ; mapped ; 0263 # 1.1 LATIN CAPITAL LETTER GAMMA +0195 ; valid # 1.1 LATIN SMALL LETTER HV +0196 ; mapped ; 0269 # 1.1 LATIN CAPITAL LETTER IOTA +0197 ; mapped ; 0268 # 1.1 LATIN CAPITAL LETTER I WITH STROKE +0198 ; mapped ; 0199 # 1.1 LATIN CAPITAL LETTER K WITH HOOK +0199..019B ; valid # 1.1 LATIN SMALL LETTER K WITH HOOK..LATIN SMALL LETTER LAMBDA WITH STROKE +019C ; mapped ; 026F # 1.1 LATIN CAPITAL LETTER TURNED M +019D ; mapped ; 0272 # 1.1 LATIN CAPITAL LETTER N WITH LEFT HOOK +019E ; valid # 1.1 LATIN SMALL LETTER N WITH LONG RIGHT LEG +019F ; mapped ; 0275 # 1.1 LATIN CAPITAL LETTER O WITH MIDDLE TILDE +01A0 ; mapped ; 01A1 # 1.1 LATIN CAPITAL LETTER O WITH HORN +01A1 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN +01A2 ; mapped ; 01A3 # 1.1 LATIN CAPITAL LETTER OI +01A3 ; valid # 1.1 LATIN SMALL LETTER OI +01A4 ; mapped ; 01A5 # 1.1 LATIN CAPITAL LETTER P WITH HOOK +01A5 ; valid # 1.1 LATIN SMALL LETTER P WITH HOOK +01A6 ; mapped ; 0280 # 1.1 LATIN LETTER YR +01A7 ; mapped ; 01A8 # 1.1 LATIN CAPITAL LETTER TONE TWO +01A8 ; valid # 1.1 LATIN SMALL LETTER TONE TWO +01A9 ; mapped ; 0283 # 1.1 LATIN CAPITAL LETTER ESH +01AA..01AB ; valid # 1.1 LATIN LETTER REVERSED ESH LOOP..LATIN SMALL LETTER T WITH PALATAL HOOK +01AC ; mapped ; 01AD # 1.1 LATIN CAPITAL LETTER T WITH HOOK +01AD ; valid # 1.1 LATIN SMALL LETTER T WITH HOOK +01AE ; mapped ; 0288 # 1.1 LATIN CAPITAL LETTER T WITH RETROFLEX HOOK +01AF ; mapped ; 01B0 # 1.1 LATIN CAPITAL LETTER U WITH HORN +01B0 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN +01B1 ; mapped ; 028A # 1.1 LATIN CAPITAL LETTER UPSILON +01B2 ; mapped ; 028B # 1.1 LATIN CAPITAL LETTER V WITH HOOK +01B3 ; mapped ; 01B4 # 1.1 LATIN CAPITAL LETTER Y WITH HOOK +01B4 ; valid # 1.1 LATIN SMALL LETTER Y WITH HOOK +01B5 ; mapped ; 01B6 # 1.1 LATIN CAPITAL LETTER Z WITH STROKE +01B6 ; valid # 1.1 LATIN SMALL LETTER Z WITH STROKE +01B7 ; mapped ; 0292 # 1.1 LATIN CAPITAL LETTER EZH +01B8 ; mapped ; 01B9 # 1.1 LATIN CAPITAL LETTER EZH REVERSED +01B9..01BB ; valid # 1.1 LATIN SMALL LETTER EZH REVERSED..LATIN LETTER TWO WITH STROKE +01BC ; mapped ; 
01BD # 1.1 LATIN CAPITAL LETTER TONE FIVE +01BD..01C3 ; valid # 1.1 LATIN SMALL LETTER TONE FIVE..LATIN LETTER RETROFLEX CLICK +01C4..01C6 ; mapped ; 0064 017E # 1.1 LATIN CAPITAL LETTER DZ WITH CARON..LATIN SMALL LETTER DZ WITH CARON +01C7..01C9 ; mapped ; 006C 006A # 1.1 LATIN CAPITAL LETTER LJ..LATIN SMALL LETTER LJ +01CA..01CC ; mapped ; 006E 006A # 1.1 LATIN CAPITAL LETTER NJ..LATIN SMALL LETTER NJ +01CD ; mapped ; 01CE # 1.1 LATIN CAPITAL LETTER A WITH CARON +01CE ; valid # 1.1 LATIN SMALL LETTER A WITH CARON +01CF ; mapped ; 01D0 # 1.1 LATIN CAPITAL LETTER I WITH CARON +01D0 ; valid # 1.1 LATIN SMALL LETTER I WITH CARON +01D1 ; mapped ; 01D2 # 1.1 LATIN CAPITAL LETTER O WITH CARON +01D2 ; valid # 1.1 LATIN SMALL LETTER O WITH CARON +01D3 ; mapped ; 01D4 # 1.1 LATIN CAPITAL LETTER U WITH CARON +01D4 ; valid # 1.1 LATIN SMALL LETTER U WITH CARON +01D5 ; mapped ; 01D6 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON +01D6 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND MACRON +01D7 ; mapped ; 01D8 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE +01D8 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE +01D9 ; mapped ; 01DA # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON +01DA ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND CARON +01DB ; mapped ; 01DC # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE +01DC..01DD ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE..LATIN SMALL LETTER TURNED E +01DE ; mapped ; 01DF # 1.1 LATIN CAPITAL LETTER A WITH DIAERESIS AND MACRON +01DF ; valid # 1.1 LATIN SMALL LETTER A WITH DIAERESIS AND MACRON +01E0 ; mapped ; 01E1 # 1.1 LATIN CAPITAL LETTER A WITH DOT ABOVE AND MACRON +01E1 ; valid # 1.1 LATIN SMALL LETTER A WITH DOT ABOVE AND MACRON +01E2 ; mapped ; 01E3 # 1.1 LATIN CAPITAL LETTER AE WITH MACRON +01E3 ; valid # 1.1 LATIN SMALL LETTER AE WITH MACRON +01E4 ; mapped ; 01E5 # 1.1 LATIN CAPITAL LETTER G WITH STROKE +01E5 ; valid # 1.1 LATIN SMALL LETTER G WITH STROKE +01E6 ; mapped ; 01E7 # 1.1 LATIN CAPITAL LETTER G WITH CARON +01E7 ; valid # 1.1 LATIN SMALL LETTER G WITH CARON +01E8 ; mapped ; 01E9 # 1.1 LATIN CAPITAL LETTER K WITH CARON +01E9 ; valid # 1.1 LATIN SMALL LETTER K WITH CARON +01EA ; mapped ; 01EB # 1.1 LATIN CAPITAL LETTER O WITH OGONEK +01EB ; valid # 1.1 LATIN SMALL LETTER O WITH OGONEK +01EC ; mapped ; 01ED # 1.1 LATIN CAPITAL LETTER O WITH OGONEK AND MACRON +01ED ; valid # 1.1 LATIN SMALL LETTER O WITH OGONEK AND MACRON +01EE ; mapped ; 01EF # 1.1 LATIN CAPITAL LETTER EZH WITH CARON +01EF..01F0 ; valid # 1.1 LATIN SMALL LETTER EZH WITH CARON..LATIN SMALL LETTER J WITH CARON +01F1..01F3 ; mapped ; 0064 007A # 1.1 LATIN CAPITAL LETTER DZ..LATIN SMALL LETTER DZ +01F4 ; mapped ; 01F5 # 1.1 LATIN CAPITAL LETTER G WITH ACUTE +01F5 ; valid # 1.1 LATIN SMALL LETTER G WITH ACUTE +01F6 ; mapped ; 0195 # 3.0 LATIN CAPITAL LETTER HWAIR +01F7 ; mapped ; 01BF # 3.0 LATIN CAPITAL LETTER WYNN +01F8 ; mapped ; 01F9 # 3.0 LATIN CAPITAL LETTER N WITH GRAVE +01F9 ; valid # 3.0 LATIN SMALL LETTER N WITH GRAVE +01FA ; mapped ; 01FB # 1.1 LATIN CAPITAL LETTER A WITH RING ABOVE AND ACUTE +01FB ; valid # 1.1 LATIN SMALL LETTER A WITH RING ABOVE AND ACUTE +01FC ; mapped ; 01FD # 1.1 LATIN CAPITAL LETTER AE WITH ACUTE +01FD ; valid # 1.1 LATIN SMALL LETTER AE WITH ACUTE +01FE ; mapped ; 01FF # 1.1 LATIN CAPITAL LETTER O WITH STROKE AND ACUTE +01FF ; valid # 1.1 LATIN SMALL LETTER O WITH STROKE AND ACUTE +0200 ; mapped ; 0201 # 1.1 LATIN CAPITAL LETTER A WITH DOUBLE GRAVE +0201 ; valid # 1.1 LATIN SMALL 
LETTER A WITH DOUBLE GRAVE +0202 ; mapped ; 0203 # 1.1 LATIN CAPITAL LETTER A WITH INVERTED BREVE +0203 ; valid # 1.1 LATIN SMALL LETTER A WITH INVERTED BREVE +0204 ; mapped ; 0205 # 1.1 LATIN CAPITAL LETTER E WITH DOUBLE GRAVE +0205 ; valid # 1.1 LATIN SMALL LETTER E WITH DOUBLE GRAVE +0206 ; mapped ; 0207 # 1.1 LATIN CAPITAL LETTER E WITH INVERTED BREVE +0207 ; valid # 1.1 LATIN SMALL LETTER E WITH INVERTED BREVE +0208 ; mapped ; 0209 # 1.1 LATIN CAPITAL LETTER I WITH DOUBLE GRAVE +0209 ; valid # 1.1 LATIN SMALL LETTER I WITH DOUBLE GRAVE +020A ; mapped ; 020B # 1.1 LATIN CAPITAL LETTER I WITH INVERTED BREVE +020B ; valid # 1.1 LATIN SMALL LETTER I WITH INVERTED BREVE +020C ; mapped ; 020D # 1.1 LATIN CAPITAL LETTER O WITH DOUBLE GRAVE +020D ; valid # 1.1 LATIN SMALL LETTER O WITH DOUBLE GRAVE +020E ; mapped ; 020F # 1.1 LATIN CAPITAL LETTER O WITH INVERTED BREVE +020F ; valid # 1.1 LATIN SMALL LETTER O WITH INVERTED BREVE +0210 ; mapped ; 0211 # 1.1 LATIN CAPITAL LETTER R WITH DOUBLE GRAVE +0211 ; valid # 1.1 LATIN SMALL LETTER R WITH DOUBLE GRAVE +0212 ; mapped ; 0213 # 1.1 LATIN CAPITAL LETTER R WITH INVERTED BREVE +0213 ; valid # 1.1 LATIN SMALL LETTER R WITH INVERTED BREVE +0214 ; mapped ; 0215 # 1.1 LATIN CAPITAL LETTER U WITH DOUBLE GRAVE +0215 ; valid # 1.1 LATIN SMALL LETTER U WITH DOUBLE GRAVE +0216 ; mapped ; 0217 # 1.1 LATIN CAPITAL LETTER U WITH INVERTED BREVE +0217 ; valid # 1.1 LATIN SMALL LETTER U WITH INVERTED BREVE +0218 ; mapped ; 0219 # 3.0 LATIN CAPITAL LETTER S WITH COMMA BELOW +0219 ; valid # 3.0 LATIN SMALL LETTER S WITH COMMA BELOW +021A ; mapped ; 021B # 3.0 LATIN CAPITAL LETTER T WITH COMMA BELOW +021B ; valid # 3.0 LATIN SMALL LETTER T WITH COMMA BELOW +021C ; mapped ; 021D # 3.0 LATIN CAPITAL LETTER YOGH +021D ; valid # 3.0 LATIN SMALL LETTER YOGH +021E ; mapped ; 021F # 3.0 LATIN CAPITAL LETTER H WITH CARON +021F ; valid # 3.0 LATIN SMALL LETTER H WITH CARON +0220 ; mapped ; 019E # 3.2 LATIN CAPITAL LETTER N WITH LONG RIGHT LEG +0221 ; valid # 4.0 LATIN SMALL LETTER D WITH CURL +0222 ; mapped ; 0223 # 3.0 LATIN CAPITAL LETTER OU +0223 ; valid # 3.0 LATIN SMALL LETTER OU +0224 ; mapped ; 0225 # 3.0 LATIN CAPITAL LETTER Z WITH HOOK +0225 ; valid # 3.0 LATIN SMALL LETTER Z WITH HOOK +0226 ; mapped ; 0227 # 3.0 LATIN CAPITAL LETTER A WITH DOT ABOVE +0227 ; valid # 3.0 LATIN SMALL LETTER A WITH DOT ABOVE +0228 ; mapped ; 0229 # 3.0 LATIN CAPITAL LETTER E WITH CEDILLA +0229 ; valid # 3.0 LATIN SMALL LETTER E WITH CEDILLA +022A ; mapped ; 022B # 3.0 LATIN CAPITAL LETTER O WITH DIAERESIS AND MACRON +022B ; valid # 3.0 LATIN SMALL LETTER O WITH DIAERESIS AND MACRON +022C ; mapped ; 022D # 3.0 LATIN CAPITAL LETTER O WITH TILDE AND MACRON +022D ; valid # 3.0 LATIN SMALL LETTER O WITH TILDE AND MACRON +022E ; mapped ; 022F # 3.0 LATIN CAPITAL LETTER O WITH DOT ABOVE +022F ; valid # 3.0 LATIN SMALL LETTER O WITH DOT ABOVE +0230 ; mapped ; 0231 # 3.0 LATIN CAPITAL LETTER O WITH DOT ABOVE AND MACRON +0231 ; valid # 3.0 LATIN SMALL LETTER O WITH DOT ABOVE AND MACRON +0232 ; mapped ; 0233 # 3.0 LATIN CAPITAL LETTER Y WITH MACRON +0233 ; valid # 3.0 LATIN SMALL LETTER Y WITH MACRON +0234..0236 ; valid # 4.0 LATIN SMALL LETTER L WITH CURL..LATIN SMALL LETTER T WITH CURL +0237..0239 ; valid # 4.1 LATIN SMALL LETTER DOTLESS J..LATIN SMALL LETTER QP DIGRAPH +023A ; mapped ; 2C65 # 4.1 LATIN CAPITAL LETTER A WITH STROKE +023B ; mapped ; 023C # 4.1 LATIN CAPITAL LETTER C WITH STROKE +023C ; valid # 4.1 LATIN SMALL LETTER C WITH STROKE +023D ; mapped ; 019A # 4.1 LATIN CAPITAL 
LETTER L WITH BAR +023E ; mapped ; 2C66 # 4.1 LATIN CAPITAL LETTER T WITH DIAGONAL STROKE +023F..0240 ; valid # 4.1 LATIN SMALL LETTER S WITH SWASH TAIL..LATIN SMALL LETTER Z WITH SWASH TAIL +0241 ; mapped ; 0242 # 4.1 LATIN CAPITAL LETTER GLOTTAL STOP +0242 ; valid # 5.0 LATIN SMALL LETTER GLOTTAL STOP +0243 ; mapped ; 0180 # 5.0 LATIN CAPITAL LETTER B WITH STROKE +0244 ; mapped ; 0289 # 5.0 LATIN CAPITAL LETTER U BAR +0245 ; mapped ; 028C # 5.0 LATIN CAPITAL LETTER TURNED V +0246 ; mapped ; 0247 # 5.0 LATIN CAPITAL LETTER E WITH STROKE +0247 ; valid # 5.0 LATIN SMALL LETTER E WITH STROKE +0248 ; mapped ; 0249 # 5.0 LATIN CAPITAL LETTER J WITH STROKE +0249 ; valid # 5.0 LATIN SMALL LETTER J WITH STROKE +024A ; mapped ; 024B # 5.0 LATIN CAPITAL LETTER SMALL Q WITH HOOK TAIL +024B ; valid # 5.0 LATIN SMALL LETTER Q WITH HOOK TAIL +024C ; mapped ; 024D # 5.0 LATIN CAPITAL LETTER R WITH STROKE +024D ; valid # 5.0 LATIN SMALL LETTER R WITH STROKE +024E ; mapped ; 024F # 5.0 LATIN CAPITAL LETTER Y WITH STROKE +024F ; valid # 5.0 LATIN SMALL LETTER Y WITH STROKE +0250..02A8 ; valid # 1.1 LATIN SMALL LETTER TURNED A..LATIN SMALL LETTER TC DIGRAPH WITH CURL +02A9..02AD ; valid # 3.0 LATIN SMALL LETTER FENG DIGRAPH..LATIN LETTER BIDENTAL PERCUSSIVE +02AE..02AF ; valid # 4.0 LATIN SMALL LETTER TURNED H WITH FISHHOOK..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL +02B0 ; mapped ; 0068 # 1.1 MODIFIER LETTER SMALL H +02B1 ; mapped ; 0266 # 1.1 MODIFIER LETTER SMALL H WITH HOOK +02B2 ; mapped ; 006A # 1.1 MODIFIER LETTER SMALL J +02B3 ; mapped ; 0072 # 1.1 MODIFIER LETTER SMALL R +02B4 ; mapped ; 0279 # 1.1 MODIFIER LETTER SMALL TURNED R +02B5 ; mapped ; 027B # 1.1 MODIFIER LETTER SMALL TURNED R WITH HOOK +02B6 ; mapped ; 0281 # 1.1 MODIFIER LETTER SMALL CAPITAL INVERTED R +02B7 ; mapped ; 0077 # 1.1 MODIFIER LETTER SMALL W +02B8 ; mapped ; 0079 # 1.1 MODIFIER LETTER SMALL Y +02B9..02C1 ; valid # 1.1 MODIFIER LETTER PRIME..MODIFIER LETTER REVERSED GLOTTAL STOP +02C2..02C5 ; valid ; ; NV8 # 1.1 MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER DOWN ARROWHEAD +02C6..02D1 ; valid # 1.1 MODIFIER LETTER CIRCUMFLEX ACCENT..MODIFIER LETTER HALF TRIANGULAR COLON +02D2..02D7 ; valid ; ; NV8 # 1.1 MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN +02D8 ; disallowed_STD3_mapped ; 0020 0306 # 1.1 BREVE +02D9 ; disallowed_STD3_mapped ; 0020 0307 # 1.1 DOT ABOVE +02DA ; disallowed_STD3_mapped ; 0020 030A # 1.1 RING ABOVE +02DB ; disallowed_STD3_mapped ; 0020 0328 # 1.1 OGONEK +02DC ; disallowed_STD3_mapped ; 0020 0303 # 1.1 SMALL TILDE +02DD ; disallowed_STD3_mapped ; 0020 030B # 1.1 DOUBLE ACUTE ACCENT +02DE ; valid ; ; NV8 # 1.1 MODIFIER LETTER RHOTIC HOOK +02DF ; valid ; ; NV8 # 3.0 MODIFIER LETTER CROSS ACCENT +02E0 ; mapped ; 0263 # 1.1 MODIFIER LETTER SMALL GAMMA +02E1 ; mapped ; 006C # 1.1 MODIFIER LETTER SMALL L +02E2 ; mapped ; 0073 # 1.1 MODIFIER LETTER SMALL S +02E3 ; mapped ; 0078 # 1.1 MODIFIER LETTER SMALL X +02E4 ; mapped ; 0295 # 1.1 MODIFIER LETTER SMALL REVERSED GLOTTAL STOP +02E5..02E9 ; valid ; ; NV8 # 1.1 MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER EXTRA-LOW TONE BAR +02EA..02EB ; valid ; ; NV8 # 3.0 MODIFIER LETTER YIN DEPARTING TONE MARK..MODIFIER LETTER YANG DEPARTING TONE MARK +02EC ; valid # 3.0 MODIFIER LETTER VOICING +02ED ; valid ; ; NV8 # 3.0 MODIFIER LETTER UNASPIRATED +02EE ; valid # 3.0 MODIFIER LETTER DOUBLE APOSTROPHE +02EF..02FF ; valid ; ; NV8 # 4.0 MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW +0300..033F ; valid # 1.1 COMBINING 
GRAVE ACCENT..COMBINING DOUBLE OVERLINE +0340 ; mapped ; 0300 # 1.1 COMBINING GRAVE TONE MARK +0341 ; mapped ; 0301 # 1.1 COMBINING ACUTE TONE MARK +0342 ; valid # 1.1 COMBINING GREEK PERISPOMENI +0343 ; mapped ; 0313 # 1.1 COMBINING GREEK KORONIS +0344 ; mapped ; 0308 0301 # 1.1 COMBINING GREEK DIALYTIKA TONOS +0345 ; mapped ; 03B9 # 1.1 COMBINING GREEK YPOGEGRAMMENI +0346..034E ; valid # 3.0 COMBINING BRIDGE ABOVE..COMBINING UPWARDS ARROW BELOW +034F ; ignored # 3.2 COMBINING GRAPHEME JOINER +0350..0357 ; valid # 4.0 COMBINING RIGHT ARROWHEAD ABOVE..COMBINING RIGHT HALF RING ABOVE +0358..035C ; valid # 4.1 COMBINING DOT ABOVE RIGHT..COMBINING DOUBLE BREVE BELOW +035D..035F ; valid # 4.0 COMBINING DOUBLE BREVE..COMBINING DOUBLE MACRON BELOW +0360..0361 ; valid # 1.1 COMBINING DOUBLE TILDE..COMBINING DOUBLE INVERTED BREVE +0362 ; valid # 3.0 COMBINING DOUBLE RIGHTWARDS ARROW BELOW +0363..036F ; valid # 3.2 COMBINING LATIN SMALL LETTER A..COMBINING LATIN SMALL LETTER X +0370 ; mapped ; 0371 # 5.1 GREEK CAPITAL LETTER HETA +0371 ; valid # 5.1 GREEK SMALL LETTER HETA +0372 ; mapped ; 0373 # 5.1 GREEK CAPITAL LETTER ARCHAIC SAMPI +0373 ; valid # 5.1 GREEK SMALL LETTER ARCHAIC SAMPI +0374 ; mapped ; 02B9 # 1.1 GREEK NUMERAL SIGN +0375 ; valid # 1.1 GREEK LOWER NUMERAL SIGN +0376 ; mapped ; 0377 # 5.1 GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA +0377 ; valid # 5.1 GREEK SMALL LETTER PAMPHYLIAN DIGAMMA +0378..0379 ; disallowed # NA .. +037A ; disallowed_STD3_mapped ; 0020 03B9 # 1.1 GREEK YPOGEGRAMMENI +037B..037D ; valid # 5.0 GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL +037E ; disallowed_STD3_mapped ; 003B # 1.1 GREEK QUESTION MARK +037F ; mapped ; 03F3 # 7.0 GREEK CAPITAL LETTER YOT +0380..0383 ; disallowed # NA .. 
+0384 ; disallowed_STD3_mapped ; 0020 0301 # 1.1 GREEK TONOS +0385 ; disallowed_STD3_mapped ; 0020 0308 0301 #1.1 GREEK DIALYTIKA TONOS +0386 ; mapped ; 03AC # 1.1 GREEK CAPITAL LETTER ALPHA WITH TONOS +0387 ; mapped ; 00B7 # 1.1 GREEK ANO TELEIA +0388 ; mapped ; 03AD # 1.1 GREEK CAPITAL LETTER EPSILON WITH TONOS +0389 ; mapped ; 03AE # 1.1 GREEK CAPITAL LETTER ETA WITH TONOS +038A ; mapped ; 03AF # 1.1 GREEK CAPITAL LETTER IOTA WITH TONOS +038B ; disallowed # NA +038C ; mapped ; 03CC # 1.1 GREEK CAPITAL LETTER OMICRON WITH TONOS +038D ; disallowed # NA +038E ; mapped ; 03CD # 1.1 GREEK CAPITAL LETTER UPSILON WITH TONOS +038F ; mapped ; 03CE # 1.1 GREEK CAPITAL LETTER OMEGA WITH TONOS +0390 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS +0391 ; mapped ; 03B1 # 1.1 GREEK CAPITAL LETTER ALPHA +0392 ; mapped ; 03B2 # 1.1 GREEK CAPITAL LETTER BETA +0393 ; mapped ; 03B3 # 1.1 GREEK CAPITAL LETTER GAMMA +0394 ; mapped ; 03B4 # 1.1 GREEK CAPITAL LETTER DELTA +0395 ; mapped ; 03B5 # 1.1 GREEK CAPITAL LETTER EPSILON +0396 ; mapped ; 03B6 # 1.1 GREEK CAPITAL LETTER ZETA +0397 ; mapped ; 03B7 # 1.1 GREEK CAPITAL LETTER ETA +0398 ; mapped ; 03B8 # 1.1 GREEK CAPITAL LETTER THETA +0399 ; mapped ; 03B9 # 1.1 GREEK CAPITAL LETTER IOTA +039A ; mapped ; 03BA # 1.1 GREEK CAPITAL LETTER KAPPA +039B ; mapped ; 03BB # 1.1 GREEK CAPITAL LETTER LAMDA +039C ; mapped ; 03BC # 1.1 GREEK CAPITAL LETTER MU +039D ; mapped ; 03BD # 1.1 GREEK CAPITAL LETTER NU +039E ; mapped ; 03BE # 1.1 GREEK CAPITAL LETTER XI +039F ; mapped ; 03BF # 1.1 GREEK CAPITAL LETTER OMICRON +03A0 ; mapped ; 03C0 # 1.1 GREEK CAPITAL LETTER PI +03A1 ; mapped ; 03C1 # 1.1 GREEK CAPITAL LETTER RHO +03A2 ; disallowed # NA +03A3 ; mapped ; 03C3 # 1.1 GREEK CAPITAL LETTER SIGMA +03A4 ; mapped ; 03C4 # 1.1 GREEK CAPITAL LETTER TAU +03A5 ; mapped ; 03C5 # 1.1 GREEK CAPITAL LETTER UPSILON +03A6 ; mapped ; 03C6 # 1.1 GREEK CAPITAL LETTER PHI +03A7 ; mapped ; 03C7 # 1.1 GREEK CAPITAL LETTER CHI +03A8 ; mapped ; 03C8 # 1.1 GREEK CAPITAL LETTER PSI +03A9 ; mapped ; 03C9 # 1.1 GREEK CAPITAL LETTER OMEGA +03AA ; mapped ; 03CA # 1.1 GREEK CAPITAL LETTER IOTA WITH DIALYTIKA +03AB ; mapped ; 03CB # 1.1 GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA +03AC..03C1 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH TONOS..GREEK SMALL LETTER RHO +03C2 ; deviation ; 03C3 # 1.1 GREEK SMALL LETTER FINAL SIGMA +03C3..03CE ; valid # 1.1 GREEK SMALL LETTER SIGMA..GREEK SMALL LETTER OMEGA WITH TONOS +03CF ; mapped ; 03D7 # 5.1 GREEK CAPITAL KAI SYMBOL +03D0 ; mapped ; 03B2 # 1.1 GREEK BETA SYMBOL +03D1 ; mapped ; 03B8 # 1.1 GREEK THETA SYMBOL +03D2 ; mapped ; 03C5 # 1.1 GREEK UPSILON WITH HOOK SYMBOL +03D3 ; mapped ; 03CD # 1.1 GREEK UPSILON WITH ACUTE AND HOOK SYMBOL +03D4 ; mapped ; 03CB # 1.1 GREEK UPSILON WITH DIAERESIS AND HOOK SYMBOL +03D5 ; mapped ; 03C6 # 1.1 GREEK PHI SYMBOL +03D6 ; mapped ; 03C0 # 1.1 GREEK PI SYMBOL +03D7 ; valid # 3.0 GREEK KAI SYMBOL +03D8 ; mapped ; 03D9 # 3.2 GREEK LETTER ARCHAIC KOPPA +03D9 ; valid # 3.2 GREEK SMALL LETTER ARCHAIC KOPPA +03DA ; mapped ; 03DB # 1.1 GREEK LETTER STIGMA +03DB ; valid # 3.0 GREEK SMALL LETTER STIGMA +03DC ; mapped ; 03DD # 1.1 GREEK LETTER DIGAMMA +03DD ; valid # 3.0 GREEK SMALL LETTER DIGAMMA +03DE ; mapped ; 03DF # 1.1 GREEK LETTER KOPPA +03DF ; valid # 3.0 GREEK SMALL LETTER KOPPA +03E0 ; mapped ; 03E1 # 1.1 GREEK LETTER SAMPI +03E1 ; valid # 3.0 GREEK SMALL LETTER SAMPI +03E2 ; mapped ; 03E3 # 1.1 COPTIC CAPITAL LETTER SHEI +03E3 ; valid # 1.1 COPTIC SMALL LETTER SHEI +03E4 ; mapped ; 03E5 # 1.1 COPTIC 
CAPITAL LETTER FEI +03E5 ; valid # 1.1 COPTIC SMALL LETTER FEI +03E6 ; mapped ; 03E7 # 1.1 COPTIC CAPITAL LETTER KHEI +03E7 ; valid # 1.1 COPTIC SMALL LETTER KHEI +03E8 ; mapped ; 03E9 # 1.1 COPTIC CAPITAL LETTER HORI +03E9 ; valid # 1.1 COPTIC SMALL LETTER HORI +03EA ; mapped ; 03EB # 1.1 COPTIC CAPITAL LETTER GANGIA +03EB ; valid # 1.1 COPTIC SMALL LETTER GANGIA +03EC ; mapped ; 03ED # 1.1 COPTIC CAPITAL LETTER SHIMA +03ED ; valid # 1.1 COPTIC SMALL LETTER SHIMA +03EE ; mapped ; 03EF # 1.1 COPTIC CAPITAL LETTER DEI +03EF ; valid # 1.1 COPTIC SMALL LETTER DEI +03F0 ; mapped ; 03BA # 1.1 GREEK KAPPA SYMBOL +03F1 ; mapped ; 03C1 # 1.1 GREEK RHO SYMBOL +03F2 ; mapped ; 03C3 # 1.1 GREEK LUNATE SIGMA SYMBOL +03F3 ; valid # 1.1 GREEK LETTER YOT +03F4 ; mapped ; 03B8 # 3.1 GREEK CAPITAL THETA SYMBOL +03F5 ; mapped ; 03B5 # 3.1 GREEK LUNATE EPSILON SYMBOL +03F6 ; valid ; ; NV8 # 3.2 GREEK REVERSED LUNATE EPSILON SYMBOL +03F7 ; mapped ; 03F8 # 4.0 GREEK CAPITAL LETTER SHO +03F8 ; valid # 4.0 GREEK SMALL LETTER SHO +03F9 ; mapped ; 03C3 # 4.0 GREEK CAPITAL LUNATE SIGMA SYMBOL +03FA ; mapped ; 03FB # 4.0 GREEK CAPITAL LETTER SAN +03FB ; valid # 4.0 GREEK SMALL LETTER SAN +03FC ; valid # 4.1 GREEK RHO WITH STROKE SYMBOL +03FD ; mapped ; 037B # 4.1 GREEK CAPITAL REVERSED LUNATE SIGMA SYMBOL +03FE ; mapped ; 037C # 4.1 GREEK CAPITAL DOTTED LUNATE SIGMA SYMBOL +03FF ; mapped ; 037D # 4.1 GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL +0400 ; mapped ; 0450 # 3.0 CYRILLIC CAPITAL LETTER IE WITH GRAVE +0401 ; mapped ; 0451 # 1.1 CYRILLIC CAPITAL LETTER IO +0402 ; mapped ; 0452 # 1.1 CYRILLIC CAPITAL LETTER DJE +0403 ; mapped ; 0453 # 1.1 CYRILLIC CAPITAL LETTER GJE +0404 ; mapped ; 0454 # 1.1 CYRILLIC CAPITAL LETTER UKRAINIAN IE +0405 ; mapped ; 0455 # 1.1 CYRILLIC CAPITAL LETTER DZE +0406 ; mapped ; 0456 # 1.1 CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I +0407 ; mapped ; 0457 # 1.1 CYRILLIC CAPITAL LETTER YI +0408 ; mapped ; 0458 # 1.1 CYRILLIC CAPITAL LETTER JE +0409 ; mapped ; 0459 # 1.1 CYRILLIC CAPITAL LETTER LJE +040A ; mapped ; 045A # 1.1 CYRILLIC CAPITAL LETTER NJE +040B ; mapped ; 045B # 1.1 CYRILLIC CAPITAL LETTER TSHE +040C ; mapped ; 045C # 1.1 CYRILLIC CAPITAL LETTER KJE +040D ; mapped ; 045D # 3.0 CYRILLIC CAPITAL LETTER I WITH GRAVE +040E ; mapped ; 045E # 1.1 CYRILLIC CAPITAL LETTER SHORT U +040F ; mapped ; 045F # 1.1 CYRILLIC CAPITAL LETTER DZHE +0410 ; mapped ; 0430 # 1.1 CYRILLIC CAPITAL LETTER A +0411 ; mapped ; 0431 # 1.1 CYRILLIC CAPITAL LETTER BE +0412 ; mapped ; 0432 # 1.1 CYRILLIC CAPITAL LETTER VE +0413 ; mapped ; 0433 # 1.1 CYRILLIC CAPITAL LETTER GHE +0414 ; mapped ; 0434 # 1.1 CYRILLIC CAPITAL LETTER DE +0415 ; mapped ; 0435 # 1.1 CYRILLIC CAPITAL LETTER IE +0416 ; mapped ; 0436 # 1.1 CYRILLIC CAPITAL LETTER ZHE +0417 ; mapped ; 0437 # 1.1 CYRILLIC CAPITAL LETTER ZE +0418 ; mapped ; 0438 # 1.1 CYRILLIC CAPITAL LETTER I +0419 ; mapped ; 0439 # 1.1 CYRILLIC CAPITAL LETTER SHORT I +041A ; mapped ; 043A # 1.1 CYRILLIC CAPITAL LETTER KA +041B ; mapped ; 043B # 1.1 CYRILLIC CAPITAL LETTER EL +041C ; mapped ; 043C # 1.1 CYRILLIC CAPITAL LETTER EM +041D ; mapped ; 043D # 1.1 CYRILLIC CAPITAL LETTER EN +041E ; mapped ; 043E # 1.1 CYRILLIC CAPITAL LETTER O +041F ; mapped ; 043F # 1.1 CYRILLIC CAPITAL LETTER PE +0420 ; mapped ; 0440 # 1.1 CYRILLIC CAPITAL LETTER ER +0421 ; mapped ; 0441 # 1.1 CYRILLIC CAPITAL LETTER ES +0422 ; mapped ; 0442 # 1.1 CYRILLIC CAPITAL LETTER TE +0423 ; mapped ; 0443 # 1.1 CYRILLIC CAPITAL LETTER U +0424 ; mapped ; 0444 # 1.1 CYRILLIC CAPITAL LETTER EF 
+0425 ; mapped ; 0445 # 1.1 CYRILLIC CAPITAL LETTER HA +0426 ; mapped ; 0446 # 1.1 CYRILLIC CAPITAL LETTER TSE +0427 ; mapped ; 0447 # 1.1 CYRILLIC CAPITAL LETTER CHE +0428 ; mapped ; 0448 # 1.1 CYRILLIC CAPITAL LETTER SHA +0429 ; mapped ; 0449 # 1.1 CYRILLIC CAPITAL LETTER SHCHA +042A ; mapped ; 044A # 1.1 CYRILLIC CAPITAL LETTER HARD SIGN +042B ; mapped ; 044B # 1.1 CYRILLIC CAPITAL LETTER YERU +042C ; mapped ; 044C # 1.1 CYRILLIC CAPITAL LETTER SOFT SIGN +042D ; mapped ; 044D # 1.1 CYRILLIC CAPITAL LETTER E +042E ; mapped ; 044E # 1.1 CYRILLIC CAPITAL LETTER YU +042F ; mapped ; 044F # 1.1 CYRILLIC CAPITAL LETTER YA +0430..044F ; valid # 1.1 CYRILLIC SMALL LETTER A..CYRILLIC SMALL LETTER YA +0450 ; valid # 3.0 CYRILLIC SMALL LETTER IE WITH GRAVE +0451..045C ; valid # 1.1 CYRILLIC SMALL LETTER IO..CYRILLIC SMALL LETTER KJE +045D ; valid # 3.0 CYRILLIC SMALL LETTER I WITH GRAVE +045E..045F ; valid # 1.1 CYRILLIC SMALL LETTER SHORT U..CYRILLIC SMALL LETTER DZHE +0460 ; mapped ; 0461 # 1.1 CYRILLIC CAPITAL LETTER OMEGA +0461 ; valid # 1.1 CYRILLIC SMALL LETTER OMEGA +0462 ; mapped ; 0463 # 1.1 CYRILLIC CAPITAL LETTER YAT +0463 ; valid # 1.1 CYRILLIC SMALL LETTER YAT +0464 ; mapped ; 0465 # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED E +0465 ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED E +0466 ; mapped ; 0467 # 1.1 CYRILLIC CAPITAL LETTER LITTLE YUS +0467 ; valid # 1.1 CYRILLIC SMALL LETTER LITTLE YUS +0468 ; mapped ; 0469 # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED LITTLE YUS +0469 ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED LITTLE YUS +046A ; mapped ; 046B # 1.1 CYRILLIC CAPITAL LETTER BIG YUS +046B ; valid # 1.1 CYRILLIC SMALL LETTER BIG YUS +046C ; mapped ; 046D # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED BIG YUS +046D ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED BIG YUS +046E ; mapped ; 046F # 1.1 CYRILLIC CAPITAL LETTER KSI +046F ; valid # 1.1 CYRILLIC SMALL LETTER KSI +0470 ; mapped ; 0471 # 1.1 CYRILLIC CAPITAL LETTER PSI +0471 ; valid # 1.1 CYRILLIC SMALL LETTER PSI +0472 ; mapped ; 0473 # 1.1 CYRILLIC CAPITAL LETTER FITA +0473 ; valid # 1.1 CYRILLIC SMALL LETTER FITA +0474 ; mapped ; 0475 # 1.1 CYRILLIC CAPITAL LETTER IZHITSA +0475 ; valid # 1.1 CYRILLIC SMALL LETTER IZHITSA +0476 ; mapped ; 0477 # 1.1 CYRILLIC CAPITAL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT +0477 ; valid # 1.1 CYRILLIC SMALL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT +0478 ; mapped ; 0479 # 1.1 CYRILLIC CAPITAL LETTER UK +0479 ; valid # 1.1 CYRILLIC SMALL LETTER UK +047A ; mapped ; 047B # 1.1 CYRILLIC CAPITAL LETTER ROUND OMEGA +047B ; valid # 1.1 CYRILLIC SMALL LETTER ROUND OMEGA +047C ; mapped ; 047D # 1.1 CYRILLIC CAPITAL LETTER OMEGA WITH TITLO +047D ; valid # 1.1 CYRILLIC SMALL LETTER OMEGA WITH TITLO +047E ; mapped ; 047F # 1.1 CYRILLIC CAPITAL LETTER OT +047F ; valid # 1.1 CYRILLIC SMALL LETTER OT +0480 ; mapped ; 0481 # 1.1 CYRILLIC CAPITAL LETTER KOPPA +0481 ; valid # 1.1 CYRILLIC SMALL LETTER KOPPA +0482 ; valid ; ; NV8 # 1.1 CYRILLIC THOUSANDS SIGN +0483..0486 ; valid # 1.1 COMBINING CYRILLIC TITLO..COMBINING CYRILLIC PSILI PNEUMATA +0487 ; valid # 5.1 COMBINING CYRILLIC POKRYTIE +0488..0489 ; valid ; ; NV8 # 3.0 COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN +048A ; mapped ; 048B # 3.2 CYRILLIC CAPITAL LETTER SHORT I WITH TAIL +048B ; valid # 3.2 CYRILLIC SMALL LETTER SHORT I WITH TAIL +048C ; mapped ; 048D # 3.0 CYRILLIC CAPITAL LETTER SEMISOFT SIGN +048D ; valid # 3.0 CYRILLIC SMALL LETTER SEMISOFT SIGN +048E ; mapped ; 048F # 3.0 CYRILLIC CAPITAL LETTER ER WITH TICK +048F ; valid # 3.0 
CYRILLIC SMALL LETTER ER WITH TICK +0490 ; mapped ; 0491 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH UPTURN +0491 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH UPTURN +0492 ; mapped ; 0493 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH STROKE +0493 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH STROKE +0494 ; mapped ; 0495 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH MIDDLE HOOK +0495 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH MIDDLE HOOK +0496 ; mapped ; 0497 # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER +0497 ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH DESCENDER +0498 ; mapped ; 0499 # 1.1 CYRILLIC CAPITAL LETTER ZE WITH DESCENDER +0499 ; valid # 1.1 CYRILLIC SMALL LETTER ZE WITH DESCENDER +049A ; mapped ; 049B # 1.1 CYRILLIC CAPITAL LETTER KA WITH DESCENDER +049B ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH DESCENDER +049C ; mapped ; 049D # 1.1 CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE +049D ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE +049E ; mapped ; 049F # 1.1 CYRILLIC CAPITAL LETTER KA WITH STROKE +049F ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH STROKE +04A0 ; mapped ; 04A1 # 1.1 CYRILLIC CAPITAL LETTER BASHKIR KA +04A1 ; valid # 1.1 CYRILLIC SMALL LETTER BASHKIR KA +04A2 ; mapped ; 04A3 # 1.1 CYRILLIC CAPITAL LETTER EN WITH DESCENDER +04A3 ; valid # 1.1 CYRILLIC SMALL LETTER EN WITH DESCENDER +04A4 ; mapped ; 04A5 # 1.1 CYRILLIC CAPITAL LIGATURE EN GHE +04A5 ; valid # 1.1 CYRILLIC SMALL LIGATURE EN GHE +04A6 ; mapped ; 04A7 # 1.1 CYRILLIC CAPITAL LETTER PE WITH MIDDLE HOOK +04A7 ; valid # 1.1 CYRILLIC SMALL LETTER PE WITH MIDDLE HOOK +04A8 ; mapped ; 04A9 # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN HA +04A9 ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN HA +04AA ; mapped ; 04AB # 1.1 CYRILLIC CAPITAL LETTER ES WITH DESCENDER +04AB ; valid # 1.1 CYRILLIC SMALL LETTER ES WITH DESCENDER +04AC ; mapped ; 04AD # 1.1 CYRILLIC CAPITAL LETTER TE WITH DESCENDER +04AD ; valid # 1.1 CYRILLIC SMALL LETTER TE WITH DESCENDER +04AE ; mapped ; 04AF # 1.1 CYRILLIC CAPITAL LETTER STRAIGHT U +04AF ; valid # 1.1 CYRILLIC SMALL LETTER STRAIGHT U +04B0 ; mapped ; 04B1 # 1.1 CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE +04B1 ; valid # 1.1 CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE +04B2 ; mapped ; 04B3 # 1.1 CYRILLIC CAPITAL LETTER HA WITH DESCENDER +04B3 ; valid # 1.1 CYRILLIC SMALL LETTER HA WITH DESCENDER +04B4 ; mapped ; 04B5 # 1.1 CYRILLIC CAPITAL LIGATURE TE TSE +04B5 ; valid # 1.1 CYRILLIC SMALL LIGATURE TE TSE +04B6 ; mapped ; 04B7 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH DESCENDER +04B7 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH DESCENDER +04B8 ; mapped ; 04B9 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE +04B9 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE +04BA ; mapped ; 04BB # 1.1 CYRILLIC CAPITAL LETTER SHHA +04BB ; valid # 1.1 CYRILLIC SMALL LETTER SHHA +04BC ; mapped ; 04BD # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN CHE +04BD ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN CHE +04BE ; mapped ; 04BF # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN CHE WITH DESCENDER +04BF ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN CHE WITH DESCENDER +04C0 ; disallowed # 1.1 CYRILLIC LETTER PALOCHKA +04C1 ; mapped ; 04C2 # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH BREVE +04C2 ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH BREVE +04C3 ; mapped ; 04C4 # 1.1 CYRILLIC CAPITAL LETTER KA WITH HOOK +04C4 ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH HOOK +04C5 ; mapped ; 04C6 # 3.2 CYRILLIC CAPITAL LETTER EL WITH TAIL +04C6 ; valid # 3.2 CYRILLIC SMALL LETTER EL WITH TAIL +04C7 ; mapped ; 04C8 # 1.1 
CYRILLIC CAPITAL LETTER EN WITH HOOK +04C8 ; valid # 1.1 CYRILLIC SMALL LETTER EN WITH HOOK +04C9 ; mapped ; 04CA # 3.2 CYRILLIC CAPITAL LETTER EN WITH TAIL +04CA ; valid # 3.2 CYRILLIC SMALL LETTER EN WITH TAIL +04CB ; mapped ; 04CC # 1.1 CYRILLIC CAPITAL LETTER KHAKASSIAN CHE +04CC ; valid # 1.1 CYRILLIC SMALL LETTER KHAKASSIAN CHE +04CD ; mapped ; 04CE # 3.2 CYRILLIC CAPITAL LETTER EM WITH TAIL +04CE ; valid # 3.2 CYRILLIC SMALL LETTER EM WITH TAIL +04CF ; valid # 5.0 CYRILLIC SMALL LETTER PALOCHKA +04D0 ; mapped ; 04D1 # 1.1 CYRILLIC CAPITAL LETTER A WITH BREVE +04D1 ; valid # 1.1 CYRILLIC SMALL LETTER A WITH BREVE +04D2 ; mapped ; 04D3 # 1.1 CYRILLIC CAPITAL LETTER A WITH DIAERESIS +04D3 ; valid # 1.1 CYRILLIC SMALL LETTER A WITH DIAERESIS +04D4 ; mapped ; 04D5 # 1.1 CYRILLIC CAPITAL LIGATURE A IE +04D5 ; valid # 1.1 CYRILLIC SMALL LIGATURE A IE +04D6 ; mapped ; 04D7 # 1.1 CYRILLIC CAPITAL LETTER IE WITH BREVE +04D7 ; valid # 1.1 CYRILLIC SMALL LETTER IE WITH BREVE +04D8 ; mapped ; 04D9 # 1.1 CYRILLIC CAPITAL LETTER SCHWA +04D9 ; valid # 1.1 CYRILLIC SMALL LETTER SCHWA +04DA ; mapped ; 04DB # 1.1 CYRILLIC CAPITAL LETTER SCHWA WITH DIAERESIS +04DB ; valid # 1.1 CYRILLIC SMALL LETTER SCHWA WITH DIAERESIS +04DC ; mapped ; 04DD # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH DIAERESIS +04DD ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH DIAERESIS +04DE ; mapped ; 04DF # 1.1 CYRILLIC CAPITAL LETTER ZE WITH DIAERESIS +04DF ; valid # 1.1 CYRILLIC SMALL LETTER ZE WITH DIAERESIS +04E0 ; mapped ; 04E1 # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN DZE +04E1 ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN DZE +04E2 ; mapped ; 04E3 # 1.1 CYRILLIC CAPITAL LETTER I WITH MACRON +04E3 ; valid # 1.1 CYRILLIC SMALL LETTER I WITH MACRON +04E4 ; mapped ; 04E5 # 1.1 CYRILLIC CAPITAL LETTER I WITH DIAERESIS +04E5 ; valid # 1.1 CYRILLIC SMALL LETTER I WITH DIAERESIS +04E6 ; mapped ; 04E7 # 1.1 CYRILLIC CAPITAL LETTER O WITH DIAERESIS +04E7 ; valid # 1.1 CYRILLIC SMALL LETTER O WITH DIAERESIS +04E8 ; mapped ; 04E9 # 1.1 CYRILLIC CAPITAL LETTER BARRED O +04E9 ; valid # 1.1 CYRILLIC SMALL LETTER BARRED O +04EA ; mapped ; 04EB # 1.1 CYRILLIC CAPITAL LETTER BARRED O WITH DIAERESIS +04EB ; valid # 1.1 CYRILLIC SMALL LETTER BARRED O WITH DIAERESIS +04EC ; mapped ; 04ED # 3.0 CYRILLIC CAPITAL LETTER E WITH DIAERESIS +04ED ; valid # 3.0 CYRILLIC SMALL LETTER E WITH DIAERESIS +04EE ; mapped ; 04EF # 1.1 CYRILLIC CAPITAL LETTER U WITH MACRON +04EF ; valid # 1.1 CYRILLIC SMALL LETTER U WITH MACRON +04F0 ; mapped ; 04F1 # 1.1 CYRILLIC CAPITAL LETTER U WITH DIAERESIS +04F1 ; valid # 1.1 CYRILLIC SMALL LETTER U WITH DIAERESIS +04F2 ; mapped ; 04F3 # 1.1 CYRILLIC CAPITAL LETTER U WITH DOUBLE ACUTE +04F3 ; valid # 1.1 CYRILLIC SMALL LETTER U WITH DOUBLE ACUTE +04F4 ; mapped ; 04F5 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH DIAERESIS +04F5 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH DIAERESIS +04F6 ; mapped ; 04F7 # 4.1 CYRILLIC CAPITAL LETTER GHE WITH DESCENDER +04F7 ; valid # 4.1 CYRILLIC SMALL LETTER GHE WITH DESCENDER +04F8 ; mapped ; 04F9 # 1.1 CYRILLIC CAPITAL LETTER YERU WITH DIAERESIS +04F9 ; valid # 1.1 CYRILLIC SMALL LETTER YERU WITH DIAERESIS +04FA ; mapped ; 04FB # 5.0 CYRILLIC CAPITAL LETTER GHE WITH STROKE AND HOOK +04FB ; valid # 5.0 CYRILLIC SMALL LETTER GHE WITH STROKE AND HOOK +04FC ; mapped ; 04FD # 5.0 CYRILLIC CAPITAL LETTER HA WITH HOOK +04FD ; valid # 5.0 CYRILLIC SMALL LETTER HA WITH HOOK +04FE ; mapped ; 04FF # 5.0 CYRILLIC CAPITAL LETTER HA WITH STROKE +04FF ; valid # 5.0 CYRILLIC SMALL LETTER HA WITH STROKE +0500 ; 
mapped ; 0501 # 3.2 CYRILLIC CAPITAL LETTER KOMI DE +0501 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DE +0502 ; mapped ; 0503 # 3.2 CYRILLIC CAPITAL LETTER KOMI DJE +0503 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DJE +0504 ; mapped ; 0505 # 3.2 CYRILLIC CAPITAL LETTER KOMI ZJE +0505 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI ZJE +0506 ; mapped ; 0507 # 3.2 CYRILLIC CAPITAL LETTER KOMI DZJE +0507 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DZJE +0508 ; mapped ; 0509 # 3.2 CYRILLIC CAPITAL LETTER KOMI LJE +0509 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI LJE +050A ; mapped ; 050B # 3.2 CYRILLIC CAPITAL LETTER KOMI NJE +050B ; valid # 3.2 CYRILLIC SMALL LETTER KOMI NJE +050C ; mapped ; 050D # 3.2 CYRILLIC CAPITAL LETTER KOMI SJE +050D ; valid # 3.2 CYRILLIC SMALL LETTER KOMI SJE +050E ; mapped ; 050F # 3.2 CYRILLIC CAPITAL LETTER KOMI TJE +050F ; valid # 3.2 CYRILLIC SMALL LETTER KOMI TJE +0510 ; mapped ; 0511 # 5.0 CYRILLIC CAPITAL LETTER REVERSED ZE +0511 ; valid # 5.0 CYRILLIC SMALL LETTER REVERSED ZE +0512 ; mapped ; 0513 # 5.0 CYRILLIC CAPITAL LETTER EL WITH HOOK +0513 ; valid # 5.0 CYRILLIC SMALL LETTER EL WITH HOOK +0514 ; mapped ; 0515 # 5.1 CYRILLIC CAPITAL LETTER LHA +0515 ; valid # 5.1 CYRILLIC SMALL LETTER LHA +0516 ; mapped ; 0517 # 5.1 CYRILLIC CAPITAL LETTER RHA +0517 ; valid # 5.1 CYRILLIC SMALL LETTER RHA +0518 ; mapped ; 0519 # 5.1 CYRILLIC CAPITAL LETTER YAE +0519 ; valid # 5.1 CYRILLIC SMALL LETTER YAE +051A ; mapped ; 051B # 5.1 CYRILLIC CAPITAL LETTER QA +051B ; valid # 5.1 CYRILLIC SMALL LETTER QA +051C ; mapped ; 051D # 5.1 CYRILLIC CAPITAL LETTER WE +051D ; valid # 5.1 CYRILLIC SMALL LETTER WE +051E ; mapped ; 051F # 5.1 CYRILLIC CAPITAL LETTER ALEUT KA +051F ; valid # 5.1 CYRILLIC SMALL LETTER ALEUT KA +0520 ; mapped ; 0521 # 5.1 CYRILLIC CAPITAL LETTER EL WITH MIDDLE HOOK +0521 ; valid # 5.1 CYRILLIC SMALL LETTER EL WITH MIDDLE HOOK +0522 ; mapped ; 0523 # 5.1 CYRILLIC CAPITAL LETTER EN WITH MIDDLE HOOK +0523 ; valid # 5.1 CYRILLIC SMALL LETTER EN WITH MIDDLE HOOK +0524 ; mapped ; 0525 # 5.2 CYRILLIC CAPITAL LETTER PE WITH DESCENDER +0525 ; valid # 5.2 CYRILLIC SMALL LETTER PE WITH DESCENDER +0526 ; mapped ; 0527 # 6.0 CYRILLIC CAPITAL LETTER SHHA WITH DESCENDER +0527 ; valid # 6.0 CYRILLIC SMALL LETTER SHHA WITH DESCENDER +0528 ; mapped ; 0529 # 7.0 CYRILLIC CAPITAL LETTER EN WITH LEFT HOOK +0529 ; valid # 7.0 CYRILLIC SMALL LETTER EN WITH LEFT HOOK +052A ; mapped ; 052B # 7.0 CYRILLIC CAPITAL LETTER DZZHE +052B ; valid # 7.0 CYRILLIC SMALL LETTER DZZHE +052C ; mapped ; 052D # 7.0 CYRILLIC CAPITAL LETTER DCHE +052D ; valid # 7.0 CYRILLIC SMALL LETTER DCHE +052E ; mapped ; 052F # 7.0 CYRILLIC CAPITAL LETTER EL WITH DESCENDER +052F ; valid # 7.0 CYRILLIC SMALL LETTER EL WITH DESCENDER +0530 ; disallowed # NA +0531 ; mapped ; 0561 # 1.1 ARMENIAN CAPITAL LETTER AYB +0532 ; mapped ; 0562 # 1.1 ARMENIAN CAPITAL LETTER BEN +0533 ; mapped ; 0563 # 1.1 ARMENIAN CAPITAL LETTER GIM +0534 ; mapped ; 0564 # 1.1 ARMENIAN CAPITAL LETTER DA +0535 ; mapped ; 0565 # 1.1 ARMENIAN CAPITAL LETTER ECH +0536 ; mapped ; 0566 # 1.1 ARMENIAN CAPITAL LETTER ZA +0537 ; mapped ; 0567 # 1.1 ARMENIAN CAPITAL LETTER EH +0538 ; mapped ; 0568 # 1.1 ARMENIAN CAPITAL LETTER ET +0539 ; mapped ; 0569 # 1.1 ARMENIAN CAPITAL LETTER TO +053A ; mapped ; 056A # 1.1 ARMENIAN CAPITAL LETTER ZHE +053B ; mapped ; 056B # 1.1 ARMENIAN CAPITAL LETTER INI +053C ; mapped ; 056C # 1.1 ARMENIAN CAPITAL LETTER LIWN +053D ; mapped ; 056D # 1.1 ARMENIAN CAPITAL LETTER XEH +053E ; mapped ; 056E # 1.1 ARMENIAN CAPITAL LETTER CA +053F 
; mapped ; 056F # 1.1 ARMENIAN CAPITAL LETTER KEN +0540 ; mapped ; 0570 # 1.1 ARMENIAN CAPITAL LETTER HO +0541 ; mapped ; 0571 # 1.1 ARMENIAN CAPITAL LETTER JA +0542 ; mapped ; 0572 # 1.1 ARMENIAN CAPITAL LETTER GHAD +0543 ; mapped ; 0573 # 1.1 ARMENIAN CAPITAL LETTER CHEH +0544 ; mapped ; 0574 # 1.1 ARMENIAN CAPITAL LETTER MEN +0545 ; mapped ; 0575 # 1.1 ARMENIAN CAPITAL LETTER YI +0546 ; mapped ; 0576 # 1.1 ARMENIAN CAPITAL LETTER NOW +0547 ; mapped ; 0577 # 1.1 ARMENIAN CAPITAL LETTER SHA +0548 ; mapped ; 0578 # 1.1 ARMENIAN CAPITAL LETTER VO +0549 ; mapped ; 0579 # 1.1 ARMENIAN CAPITAL LETTER CHA +054A ; mapped ; 057A # 1.1 ARMENIAN CAPITAL LETTER PEH +054B ; mapped ; 057B # 1.1 ARMENIAN CAPITAL LETTER JHEH +054C ; mapped ; 057C # 1.1 ARMENIAN CAPITAL LETTER RA +054D ; mapped ; 057D # 1.1 ARMENIAN CAPITAL LETTER SEH +054E ; mapped ; 057E # 1.1 ARMENIAN CAPITAL LETTER VEW +054F ; mapped ; 057F # 1.1 ARMENIAN CAPITAL LETTER TIWN +0550 ; mapped ; 0580 # 1.1 ARMENIAN CAPITAL LETTER REH +0551 ; mapped ; 0581 # 1.1 ARMENIAN CAPITAL LETTER CO +0552 ; mapped ; 0582 # 1.1 ARMENIAN CAPITAL LETTER YIWN +0553 ; mapped ; 0583 # 1.1 ARMENIAN CAPITAL LETTER PIWR +0554 ; mapped ; 0584 # 1.1 ARMENIAN CAPITAL LETTER KEH +0555 ; mapped ; 0585 # 1.1 ARMENIAN CAPITAL LETTER OH +0556 ; mapped ; 0586 # 1.1 ARMENIAN CAPITAL LETTER FEH +0557..0558 ; disallowed # NA .. +0559 ; valid # 1.1 ARMENIAN MODIFIER LETTER LEFT HALF RING +055A..055F ; valid ; ; NV8 # 1.1 ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK +0560 ; disallowed # NA +0561..0586 ; valid # 1.1 ARMENIAN SMALL LETTER AYB..ARMENIAN SMALL LETTER FEH +0587 ; mapped ; 0565 0582 # 1.1 ARMENIAN SMALL LIGATURE ECH YIWN +0588 ; disallowed # NA +0589 ; valid ; ; NV8 # 1.1 ARMENIAN FULL STOP +058A ; valid ; ; NV8 # 3.0 ARMENIAN HYPHEN +058B..058C ; disallowed # NA .. +058D..058E ; valid ; ; NV8 # 7.0 RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN +058F ; valid ; ; NV8 # 6.1 ARMENIAN DRAM SIGN +0590 ; disallowed # NA +0591..05A1 ; valid # 2.0 HEBREW ACCENT ETNAHTA..HEBREW ACCENT PAZER +05A2 ; valid # 4.1 HEBREW ACCENT ATNAH HAFUKH +05A3..05AF ; valid # 2.0 HEBREW ACCENT MUNAH..HEBREW MARK MASORA CIRCLE +05B0..05B9 ; valid # 1.1 HEBREW POINT SHEVA..HEBREW POINT HOLAM +05BA ; valid # 5.0 HEBREW POINT HOLAM HASER FOR VAV +05BB..05BD ; valid # 1.1 HEBREW POINT QUBUTS..HEBREW POINT METEG +05BE ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION MAQAF +05BF ; valid # 1.1 HEBREW POINT RAFE +05C0 ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION PASEQ +05C1..05C2 ; valid # 1.1 HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT +05C3 ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION SOF PASUQ +05C4 ; valid # 2.0 HEBREW MARK UPPER DOT +05C5 ; valid # 4.1 HEBREW MARK LOWER DOT +05C6 ; valid ; ; NV8 # 4.1 HEBREW PUNCTUATION NUN HAFUKHA +05C7 ; valid # 4.1 HEBREW POINT QAMATS QATAN +05C8..05CF ; disallowed # NA .. +05D0..05EA ; valid # 1.1 HEBREW LETTER ALEF..HEBREW LETTER TAV +05EB..05EF ; disallowed # NA .. +05F0..05F4 ; valid # 1.1 HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW PUNCTUATION GERSHAYIM +05F5..05FF ; disallowed # NA .. 
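Because the ranges in this table are sorted and non-overlapping, a consumer can answer "what is the status of code point X?" with a binary search rather than a linear scan. A small standalone sketch of that lookup, using an invented three-row excerpt (the real generated tables are far larger):

```rust
// Illustrative only: code-point lookup over sorted, non-overlapping ranges,
// the usual way tables like the one above are queried at runtime.
fn status_of<'a>(table: &[(u32, u32, &'a str)], cp: u32) -> Option<&'a str> {
    // Index of the first range whose end is >= cp; it matches only if its
    // start is also <= cp.
    let i = table.partition_point(|&(_, last, _)| last < cp);
    table
        .get(i)
        .filter(|&&(first, _, _)| first <= cp)
        .map(|&(_, _, s)| s)
}

fn main() {
    // Invented excerpt mirroring a few rows from the table above.
    let table = [
        (0x0041, 0x005A, "mapped"),
        (0x0061, 0x007A, "valid"),
        (0x0378, 0x0379, "disallowed"),
    ];
    assert_eq!(status_of(&table, 0x0043), Some("mapped"));
    assert_eq!(status_of(&table, 0x0060), None); // falls in a gap of this excerpt
}
```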
+0600..0603 ; disallowed # 4.0 ARABIC NUMBER SIGN..ARABIC SIGN SAFHA
+0604 ; disallowed # 6.1 ARABIC SIGN SAMVAT
+0605 ; disallowed # 7.0 ARABIC NUMBER MARK ABOVE
+0606..060A ; valid ; ; NV8 # 5.1 ARABIC-INDIC CUBE ROOT..ARABIC-INDIC PER TEN THOUSAND SIGN
+060B ; valid ; ; NV8 # 4.1 AFGHANI SIGN
+060C ; valid ; ; NV8 # 1.1 ARABIC COMMA
+060D..060F ; valid ; ; NV8 # 4.0 ARABIC DATE SEPARATOR..ARABIC SIGN MISRA
+0610..0615 ; valid # 4.0 ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL HIGH TAH
+0616..061A ; valid # 5.1 ARABIC SMALL HIGH LIGATURE ALEF WITH LAM WITH YEH..ARABIC SMALL KASRA
+061B ; valid ; ; NV8 # 1.1 ARABIC SEMICOLON
+061C ; disallowed # 6.3 ARABIC LETTER MARK
+061D ; disallowed # NA
+061E ; valid ; ; NV8 # 4.1 ARABIC TRIPLE DOT PUNCTUATION MARK
+061F ; valid ; ; NV8 # 1.1 ARABIC QUESTION MARK
+0620 ; valid # 6.0 ARABIC LETTER KASHMIRI YEH
+0621..063A ; valid # 1.1 ARABIC LETTER HAMZA..ARABIC LETTER GHAIN
+063B..063F ; valid # 5.1 ARABIC LETTER KEHEH WITH TWO DOTS ABOVE..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE
+0640 ; valid ; ; NV8 # 1.1 ARABIC TATWEEL
+0641..0652 ; valid # 1.1 ARABIC LETTER FEH..ARABIC SUKUN
+0653..0655 ; valid # 3.0 ARABIC MADDAH ABOVE..ARABIC HAMZA BELOW
+0656..0658 ; valid # 4.0 ARABIC SUBSCRIPT ALEF..ARABIC MARK NOON GHUNNA
+0659..065E ; valid # 4.1 ARABIC ZWARAKAY..ARABIC FATHA WITH TWO DOTS
+065F ; valid # 6.0 ARABIC WAVY HAMZA BELOW
+0660..0669 ; valid # 1.1 ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE
+066A..066D ; valid ; ; NV8 # 1.1 ARABIC PERCENT SIGN..ARABIC FIVE POINTED STAR
+066E..066F ; valid # 3.2 ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF
+0670..0674 ; valid # 1.1 ARABIC LETTER SUPERSCRIPT ALEF..ARABIC LETTER HIGH HAMZA
+0675 ; mapped ; 0627 0674 # 1.1 ARABIC LETTER HIGH HAMZA ALEF
+0676 ; mapped ; 0648 0674 # 1.1 ARABIC LETTER HIGH HAMZA WAW
+0677 ; mapped ; 06C7 0674 # 1.1 ARABIC LETTER U WITH HAMZA ABOVE
+0678 ; mapped ; 064A 0674 # 1.1 ARABIC LETTER HIGH HAMZA YEH
+0679..06B7 ; valid # 1.1 ARABIC LETTER TTEH..ARABIC LETTER LAM WITH THREE DOTS ABOVE
+06B8..06B9 ; valid # 3.0 ARABIC LETTER LAM WITH THREE DOTS BELOW..ARABIC LETTER NOON WITH DOT BELOW
+06BA..06BE ; valid # 1.1 ARABIC LETTER NOON GHUNNA..ARABIC LETTER HEH DOACHASHMEE
+06BF ; valid # 3.0 ARABIC LETTER TCHEH WITH DOT ABOVE
+06C0..06CE ; valid # 1.1 ARABIC LETTER HEH WITH YEH ABOVE..ARABIC LETTER YEH WITH SMALL V
+06CF ; valid # 3.0 ARABIC LETTER WAW WITH DOT ABOVE
+06D0..06D3 ; valid # 1.1 ARABIC LETTER E..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE
+06D4 ; valid ; ; NV8 # 1.1 ARABIC FULL STOP
+06D5..06DC ; valid # 1.1 ARABIC LETTER AE..ARABIC SMALL HIGH SEEN
+06DD ; disallowed # 1.1 ARABIC END OF AYAH
+06DE ; valid ; ; NV8 # 1.1 ARABIC START OF RUB EL HIZB
+06DF..06E8 ; valid # 1.1 ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH NOON
+06E9 ; valid ; ; NV8 # 1.1 ARABIC PLACE OF SAJDAH
+06EA..06ED ; valid # 1.1 ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+06EE..06EF ; valid # 4.0 ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V
+06F0..06F9 ; valid # 1.1 EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE
+06FA..06FE ; valid # 3.0 ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC SIGN SINDHI POSTPOSITION MEN
+06FF ; valid # 4.0 ARABIC LETTER HEH WITH INVERTED V
+0700..070D ; valid ; ; NV8 # 3.0 SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS
+070E ; disallowed # NA
+070F ; disallowed # 3.0 SYRIAC ABBREVIATION MARK
+0710..072C ; valid # 3.0 SYRIAC LETTER ALAPH..SYRIAC LETTER TAW
+072D..072F ; valid # 4.0 SYRIAC LETTER PERSIAN BHETH..SYRIAC LETTER PERSIAN DHALATH
+0730..074A ; valid # 3.0 SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+074B..074C ; disallowed # NA ..
+074D..074F ; valid # 4.0 SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE
+0750..076D ; valid # 4.1 ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER SEEN WITH TWO DOTS VERTICALLY ABOVE
+076E..077F ; valid # 5.1 ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE
+0780..07B0 ; valid # 3.0 THAANA LETTER HAA..THAANA SUKUN
+07B1 ; valid # 3.2 THAANA LETTER NAA
+07B2..07BF ; disallowed # NA ..
+07C0..07F5 ; valid # 5.0 NKO DIGIT ZERO..NKO LOW TONE APOSTROPHE
+07F6..07FA ; valid ; ; NV8 # 5.0 NKO SYMBOL OO DENNEN..NKO LAJANYALAN
+07FB..07FF ; disallowed # NA ..
+0800..082D ; valid # 5.2 SAMARITAN LETTER ALAF..SAMARITAN MARK NEQUDAA
+082E..082F ; disallowed # NA ..
+0830..083E ; valid ; ; NV8 # 5.2 SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU
+083F ; disallowed # NA
+0840..085B ; valid # 6.0 MANDAIC LETTER HALQA..MANDAIC GEMINATION MARK
+085C..085D ; disallowed # NA ..
+085E ; valid ; ; NV8 # 6.0 MANDAIC PUNCTUATION
+085F..089F ; disallowed # NA ..
+08A0 ; valid # 6.1 ARABIC LETTER BEH WITH SMALL V BELOW
+08A1 ; valid # 7.0 ARABIC LETTER BEH WITH HAMZA ABOVE
+08A2..08AC ; valid # 6.1 ARABIC LETTER JEEM WITH TWO DOTS ABOVE..ARABIC LETTER ROHINGYA YEH
+08AD..08B2 ; valid # 7.0 ARABIC LETTER LOW ALEF..ARABIC LETTER ZAIN WITH INVERTED V ABOVE
+08B3..08B4 ; valid # 8.0 ARABIC LETTER AIN WITH THREE DOTS BELOW..ARABIC LETTER KAF WITH DOT BELOW
+08B5 ; disallowed # NA
+08B6..08BD ; valid # 9.0 ARABIC LETTER BEH WITH SMALL MEEM ABOVE..ARABIC LETTER AFRICAN NOON
+08BE..08D3 ; disallowed # NA ..
+08D4..08E1 ; valid # 9.0 ARABIC SMALL HIGH WORD AR-RUB..ARABIC SMALL HIGH SIGN SAFHA
+08E2 ; disallowed # 9.0 ARABIC DISPUTED END OF AYAH
+08E3 ; valid # 8.0 ARABIC TURNED DAMMA BELOW
+08E4..08FE ; valid # 6.1 ARABIC CURLY FATHA..ARABIC DAMMA WITH DOT
+08FF ; valid # 7.0 ARABIC MARK SIDEWAYS NOON GHUNNA
+0900 ; valid # 5.2 DEVANAGARI SIGN INVERTED CANDRABINDU
+0901..0903 ; valid # 1.1 DEVANAGARI SIGN CANDRABINDU..DEVANAGARI SIGN VISARGA
+0904 ; valid # 4.0 DEVANAGARI LETTER SHORT A
+0905..0939 ; valid # 1.1 DEVANAGARI LETTER A..DEVANAGARI LETTER HA
+093A..093B ; valid # 6.0 DEVANAGARI VOWEL SIGN OE..DEVANAGARI VOWEL SIGN OOE
+093C..094D ; valid # 1.1 DEVANAGARI SIGN NUKTA..DEVANAGARI SIGN VIRAMA
+094E ; valid # 5.2 DEVANAGARI VOWEL SIGN PRISHTHAMATRA E
+094F ; valid # 6.0 DEVANAGARI VOWEL SIGN AW
+0950..0954 ; valid # 1.1 DEVANAGARI OM..DEVANAGARI ACUTE ACCENT
+0955 ; valid # 5.2 DEVANAGARI VOWEL SIGN CANDRA LONG E
+0956..0957 ; valid # 6.0 DEVANAGARI VOWEL SIGN UE..DEVANAGARI VOWEL SIGN UUE
+0958 ; mapped ; 0915 093C # 1.1 DEVANAGARI LETTER QA
+0959 ; mapped ; 0916 093C # 1.1 DEVANAGARI LETTER KHHA
+095A ; mapped ; 0917 093C # 1.1 DEVANAGARI LETTER GHHA
+095B ; mapped ; 091C 093C # 1.1 DEVANAGARI LETTER ZA
+095C ; mapped ; 0921 093C # 1.1 DEVANAGARI LETTER DDDHA
+095D ; mapped ; 0922 093C # 1.1 DEVANAGARI LETTER RHA
+095E ; mapped ; 092B 093C # 1.1 DEVANAGARI LETTER FA
+095F ; mapped ; 092F 093C # 1.1 DEVANAGARI LETTER YYA
+0960..0963 ; valid # 1.1 DEVANAGARI LETTER VOCALIC RR..DEVANAGARI VOWEL SIGN VOCALIC LL
+0964..0965 ; valid ; ; NV8 # 1.1 DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA
+0966..096F ; valid # 1.1 DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE
+0970 ; valid ; ; NV8 # 1.1 DEVANAGARI ABBREVIATION SIGN
+0971..0972 ; valid # 5.1 DEVANAGARI SIGN HIGH SPACING DOT..DEVANAGARI LETTER CANDRA A
+0973..0977 ; valid # 6.0 DEVANAGARI LETTER OE..DEVANAGARI LETTER UUE
+0978 ; valid # 7.0 DEVANAGARI LETTER MARWARI DDA
+0979..097A ; valid # 5.2 DEVANAGARI LETTER ZHA..DEVANAGARI LETTER HEAVY YA
+097B..097C ; valid # 5.0 DEVANAGARI LETTER GGA..DEVANAGARI LETTER JJA
+097D ; valid # 4.1 DEVANAGARI LETTER GLOTTAL STOP
+097E..097F ; valid # 5.0 DEVANAGARI LETTER DDDA..DEVANAGARI LETTER BBA
+0980 ; valid # 7.0 BENGALI ANJI
+0981..0983 ; valid # 1.1 BENGALI SIGN CANDRABINDU..BENGALI SIGN VISARGA
+0984 ; disallowed # NA
+0985..098C ; valid # 1.1 BENGALI LETTER A..BENGALI LETTER VOCALIC L
+098D..098E ; disallowed # NA ..
+098F..0990 ; valid # 1.1 BENGALI LETTER E..BENGALI LETTER AI
+0991..0992 ; disallowed # NA ..
+0993..09A8 ; valid # 1.1 BENGALI LETTER O..BENGALI LETTER NA
+09A9 ; disallowed # NA
+09AA..09B0 ; valid # 1.1 BENGALI LETTER PA..BENGALI LETTER RA
+09B1 ; disallowed # NA
+09B2 ; valid # 1.1 BENGALI LETTER LA
+09B3..09B5 ; disallowed # NA ..
+09B6..09B9 ; valid # 1.1 BENGALI LETTER SHA..BENGALI LETTER HA
+09BA..09BB ; disallowed # NA ..
+09BC ; valid # 1.1 BENGALI SIGN NUKTA
+09BD ; valid # 4.0 BENGALI SIGN AVAGRAHA
+09BE..09C4 ; valid # 1.1 BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN VOCALIC RR
+09C5..09C6 ; disallowed # NA ..
+09C7..09C8 ; valid # 1.1 BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+09C9..09CA ; disallowed # NA ..
+09CB..09CD ; valid # 1.1 BENGALI VOWEL SIGN O..BENGALI SIGN VIRAMA
+09CE ; valid # 4.1 BENGALI LETTER KHANDA TA
+09CF..09D6 ; disallowed # NA ..
+09D7 ; valid # 1.1 BENGALI AU LENGTH MARK
+09D8..09DB ; disallowed # NA ..
+09DC ; mapped ; 09A1 09BC # 1.1 BENGALI LETTER RRA
+09DD ; mapped ; 09A2 09BC # 1.1 BENGALI LETTER RHA
+09DE ; disallowed # NA
+09DF ; mapped ; 09AF 09BC # 1.1 BENGALI LETTER YYA
+09E0..09E3 ; valid # 1.1 BENGALI LETTER VOCALIC RR..BENGALI VOWEL SIGN VOCALIC LL
+09E4..09E5 ; disallowed # NA ..
+09E6..09F1 ; valid # 1.1 BENGALI DIGIT ZERO..BENGALI LETTER RA WITH LOWER DIAGONAL
+09F2..09FA ; valid ; ; NV8 # 1.1 BENGALI RUPEE MARK..BENGALI ISSHAR
+09FB ; valid ; ; NV8 # 5.2 BENGALI GANDA MARK
+09FC..0A00 ; disallowed # NA ..
+0A01 ; valid # 4.0 GURMUKHI SIGN ADAK BINDI
+0A02 ; valid # 1.1 GURMUKHI SIGN BINDI
+0A03 ; valid # 4.0 GURMUKHI SIGN VISARGA
+0A04 ; disallowed # NA
+0A05..0A0A ; valid # 1.1 GURMUKHI LETTER A..GURMUKHI LETTER UU
+0A0B..0A0E ; disallowed # NA ..
+0A0F..0A10 ; valid # 1.1 GURMUKHI LETTER EE..GURMUKHI LETTER AI
+0A11..0A12 ; disallowed # NA ..
+0A13..0A28 ; valid # 1.1 GURMUKHI LETTER OO..GURMUKHI LETTER NA
+0A29 ; disallowed # NA
+0A2A..0A30 ; valid # 1.1 GURMUKHI LETTER PA..GURMUKHI LETTER RA
+0A31 ; disallowed # NA
+0A32 ; valid # 1.1 GURMUKHI LETTER LA
+0A33 ; mapped ; 0A32 0A3C # 1.1 GURMUKHI LETTER LLA
+0A34 ; disallowed # NA
+0A35 ; valid # 1.1 GURMUKHI LETTER VA
+0A36 ; mapped ; 0A38 0A3C # 1.1 GURMUKHI LETTER SHA
+0A37 ; disallowed # NA
+0A38..0A39 ; valid # 1.1 GURMUKHI LETTER SA..GURMUKHI LETTER HA
+0A3A..0A3B ; disallowed # NA ..
+0A3C ; valid # 1.1 GURMUKHI SIGN NUKTA
+0A3D ; disallowed # NA
+0A3E..0A42 ; valid # 1.1 GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN UU
+0A43..0A46 ; disallowed # NA ..
+0A47..0A48 ; valid # 1.1 GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+0A49..0A4A ; disallowed # NA ..
+0A4B..0A4D ; valid # 1.1 GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+0A4E..0A50 ; disallowed # NA ..
+0A51 ; valid # 5.1 GURMUKHI SIGN UDAAT
+0A52..0A58 ; disallowed # NA ..
+0A59 ; mapped ; 0A16 0A3C # 1.1 GURMUKHI LETTER KHHA +0A5A ; mapped ; 0A17 0A3C # 1.1 GURMUKHI LETTER GHHA +0A5B ; mapped ; 0A1C 0A3C # 1.1 GURMUKHI LETTER ZA +0A5C ; valid # 1.1 GURMUKHI LETTER RRA +0A5D ; disallowed # NA +0A5E ; mapped ; 0A2B 0A3C # 1.1 GURMUKHI LETTER FA +0A5F..0A65 ; disallowed # NA .. +0A66..0A74 ; valid # 1.1 GURMUKHI DIGIT ZERO..GURMUKHI EK ONKAR +0A75 ; valid # 5.1 GURMUKHI SIGN YAKASH +0A76..0A80 ; disallowed # NA .. +0A81..0A83 ; valid # 1.1 GUJARATI SIGN CANDRABINDU..GUJARATI SIGN VISARGA +0A84 ; disallowed # NA +0A85..0A8B ; valid # 1.1 GUJARATI LETTER A..GUJARATI LETTER VOCALIC R +0A8C ; valid # 4.0 GUJARATI LETTER VOCALIC L +0A8D ; valid # 1.1 GUJARATI VOWEL CANDRA E +0A8E ; disallowed # NA +0A8F..0A91 ; valid # 1.1 GUJARATI LETTER E..GUJARATI VOWEL CANDRA O +0A92 ; disallowed # NA +0A93..0AA8 ; valid # 1.1 GUJARATI LETTER O..GUJARATI LETTER NA +0AA9 ; disallowed # NA +0AAA..0AB0 ; valid # 1.1 GUJARATI LETTER PA..GUJARATI LETTER RA +0AB1 ; disallowed # NA +0AB2..0AB3 ; valid # 1.1 GUJARATI LETTER LA..GUJARATI LETTER LLA +0AB4 ; disallowed # NA +0AB5..0AB9 ; valid # 1.1 GUJARATI LETTER VA..GUJARATI LETTER HA +0ABA..0ABB ; disallowed # NA .. +0ABC..0AC5 ; valid # 1.1 GUJARATI SIGN NUKTA..GUJARATI VOWEL SIGN CANDRA E +0AC6 ; disallowed # NA +0AC7..0AC9 ; valid # 1.1 GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN CANDRA O +0ACA ; disallowed # NA +0ACB..0ACD ; valid # 1.1 GUJARATI VOWEL SIGN O..GUJARATI SIGN VIRAMA +0ACE..0ACF ; disallowed # NA .. +0AD0 ; valid # 1.1 GUJARATI OM +0AD1..0ADF ; disallowed # NA .. +0AE0 ; valid # 1.1 GUJARATI LETTER VOCALIC RR +0AE1..0AE3 ; valid # 4.0 GUJARATI LETTER VOCALIC LL..GUJARATI VOWEL SIGN VOCALIC LL +0AE4..0AE5 ; disallowed # NA .. +0AE6..0AEF ; valid # 1.1 GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE +0AF0 ; valid ; ; NV8 # 6.1 GUJARATI ABBREVIATION SIGN +0AF1 ; valid ; ; NV8 # 4.0 GUJARATI RUPEE SIGN +0AF2..0AF8 ; disallowed # NA .. +0AF9 ; valid # 8.0 GUJARATI LETTER ZHA +0AFA..0B00 ; disallowed # NA .. +0B01..0B03 ; valid # 1.1 ORIYA SIGN CANDRABINDU..ORIYA SIGN VISARGA +0B04 ; disallowed # NA +0B05..0B0C ; valid # 1.1 ORIYA LETTER A..ORIYA LETTER VOCALIC L +0B0D..0B0E ; disallowed # NA .. +0B0F..0B10 ; valid # 1.1 ORIYA LETTER E..ORIYA LETTER AI +0B11..0B12 ; disallowed # NA .. +0B13..0B28 ; valid # 1.1 ORIYA LETTER O..ORIYA LETTER NA +0B29 ; disallowed # NA +0B2A..0B30 ; valid # 1.1 ORIYA LETTER PA..ORIYA LETTER RA +0B31 ; disallowed # NA +0B32..0B33 ; valid # 1.1 ORIYA LETTER LA..ORIYA LETTER LLA +0B34 ; disallowed # NA +0B35 ; valid # 4.0 ORIYA LETTER VA +0B36..0B39 ; valid # 1.1 ORIYA LETTER SHA..ORIYA LETTER HA +0B3A..0B3B ; disallowed # NA .. +0B3C..0B43 ; valid # 1.1 ORIYA SIGN NUKTA..ORIYA VOWEL SIGN VOCALIC R +0B44 ; valid # 5.1 ORIYA VOWEL SIGN VOCALIC RR +0B45..0B46 ; disallowed # NA .. +0B47..0B48 ; valid # 1.1 ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI +0B49..0B4A ; disallowed # NA .. +0B4B..0B4D ; valid # 1.1 ORIYA VOWEL SIGN O..ORIYA SIGN VIRAMA +0B4E..0B55 ; disallowed # NA .. +0B56..0B57 ; valid # 1.1 ORIYA AI LENGTH MARK..ORIYA AU LENGTH MARK +0B58..0B5B ; disallowed # NA .. +0B5C ; mapped ; 0B21 0B3C # 1.1 ORIYA LETTER RRA +0B5D ; mapped ; 0B22 0B3C # 1.1 ORIYA LETTER RHA +0B5E ; disallowed # NA +0B5F..0B61 ; valid # 1.1 ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL +0B62..0B63 ; valid # 5.1 ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL +0B64..0B65 ; disallowed # NA .. 
+0B66..0B6F ; valid # 1.1 ORIYA DIGIT ZERO..ORIYA DIGIT NINE +0B70 ; valid ; ; NV8 # 1.1 ORIYA ISSHAR +0B71 ; valid # 4.0 ORIYA LETTER WA +0B72..0B77 ; valid ; ; NV8 # 6.0 ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS +0B78..0B81 ; disallowed # NA .. +0B82..0B83 ; valid # 1.1 TAMIL SIGN ANUSVARA..TAMIL SIGN VISARGA +0B84 ; disallowed # NA +0B85..0B8A ; valid # 1.1 TAMIL LETTER A..TAMIL LETTER UU +0B8B..0B8D ; disallowed # NA .. +0B8E..0B90 ; valid # 1.1 TAMIL LETTER E..TAMIL LETTER AI +0B91 ; disallowed # NA +0B92..0B95 ; valid # 1.1 TAMIL LETTER O..TAMIL LETTER KA +0B96..0B98 ; disallowed # NA .. +0B99..0B9A ; valid # 1.1 TAMIL LETTER NGA..TAMIL LETTER CA +0B9B ; disallowed # NA +0B9C ; valid # 1.1 TAMIL LETTER JA +0B9D ; disallowed # NA +0B9E..0B9F ; valid # 1.1 TAMIL LETTER NYA..TAMIL LETTER TTA +0BA0..0BA2 ; disallowed # NA .. +0BA3..0BA4 ; valid # 1.1 TAMIL LETTER NNA..TAMIL LETTER TA +0BA5..0BA7 ; disallowed # NA .. +0BA8..0BAA ; valid # 1.1 TAMIL LETTER NA..TAMIL LETTER PA +0BAB..0BAD ; disallowed # NA .. +0BAE..0BB5 ; valid # 1.1 TAMIL LETTER MA..TAMIL LETTER VA +0BB6 ; valid # 4.1 TAMIL LETTER SHA +0BB7..0BB9 ; valid # 1.1 TAMIL LETTER SSA..TAMIL LETTER HA +0BBA..0BBD ; disallowed # NA .. +0BBE..0BC2 ; valid # 1.1 TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN UU +0BC3..0BC5 ; disallowed # NA .. +0BC6..0BC8 ; valid # 1.1 TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI +0BC9 ; disallowed # NA +0BCA..0BCD ; valid # 1.1 TAMIL VOWEL SIGN O..TAMIL SIGN VIRAMA +0BCE..0BCF ; disallowed # NA .. +0BD0 ; valid # 5.1 TAMIL OM +0BD1..0BD6 ; disallowed # NA .. +0BD7 ; valid # 1.1 TAMIL AU LENGTH MARK +0BD8..0BE5 ; disallowed # NA .. +0BE6 ; valid # 4.1 TAMIL DIGIT ZERO +0BE7..0BEF ; valid # 1.1 TAMIL DIGIT ONE..TAMIL DIGIT NINE +0BF0..0BF2 ; valid ; ; NV8 # 1.1 TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND +0BF3..0BFA ; valid ; ; NV8 # 4.0 TAMIL DAY SIGN..TAMIL NUMBER SIGN +0BFB..0BFF ; disallowed # NA .. +0C00 ; valid # 7.0 TELUGU SIGN COMBINING CANDRABINDU ABOVE +0C01..0C03 ; valid # 1.1 TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA +0C04 ; disallowed # NA +0C05..0C0C ; valid # 1.1 TELUGU LETTER A..TELUGU LETTER VOCALIC L +0C0D ; disallowed # NA +0C0E..0C10 ; valid # 1.1 TELUGU LETTER E..TELUGU LETTER AI +0C11 ; disallowed # NA +0C12..0C28 ; valid # 1.1 TELUGU LETTER O..TELUGU LETTER NA +0C29 ; disallowed # NA +0C2A..0C33 ; valid # 1.1 TELUGU LETTER PA..TELUGU LETTER LLA +0C34 ; valid # 7.0 TELUGU LETTER LLLA +0C35..0C39 ; valid # 1.1 TELUGU LETTER VA..TELUGU LETTER HA +0C3A..0C3C ; disallowed # NA .. +0C3D ; valid # 5.1 TELUGU SIGN AVAGRAHA +0C3E..0C44 ; valid # 1.1 TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN VOCALIC RR +0C45 ; disallowed # NA +0C46..0C48 ; valid # 1.1 TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI +0C49 ; disallowed # NA +0C4A..0C4D ; valid # 1.1 TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA +0C4E..0C54 ; disallowed # NA .. +0C55..0C56 ; valid # 1.1 TELUGU LENGTH MARK..TELUGU AI LENGTH MARK +0C57 ; disallowed # NA +0C58..0C59 ; valid # 5.1 TELUGU LETTER TSA..TELUGU LETTER DZA +0C5A ; valid # 8.0 TELUGU LETTER RRRA +0C5B..0C5F ; disallowed # NA .. +0C60..0C61 ; valid # 1.1 TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL +0C62..0C63 ; valid # 5.1 TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL +0C64..0C65 ; disallowed # NA .. +0C66..0C6F ; valid # 1.1 TELUGU DIGIT ZERO..TELUGU DIGIT NINE +0C70..0C77 ; disallowed # NA .. 
+0C78..0C7F ; valid ; ; NV8 # 5.1 TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU SIGN TUUMU +0C80 ; valid # 9.0 KANNADA SIGN SPACING CANDRABINDU +0C81 ; valid # 7.0 KANNADA SIGN CANDRABINDU +0C82..0C83 ; valid # 1.1 KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA +0C84 ; disallowed # NA +0C85..0C8C ; valid # 1.1 KANNADA LETTER A..KANNADA LETTER VOCALIC L +0C8D ; disallowed # NA +0C8E..0C90 ; valid # 1.1 KANNADA LETTER E..KANNADA LETTER AI +0C91 ; disallowed # NA +0C92..0CA8 ; valid # 1.1 KANNADA LETTER O..KANNADA LETTER NA +0CA9 ; disallowed # NA +0CAA..0CB3 ; valid # 1.1 KANNADA LETTER PA..KANNADA LETTER LLA +0CB4 ; disallowed # NA +0CB5..0CB9 ; valid # 1.1 KANNADA LETTER VA..KANNADA LETTER HA +0CBA..0CBB ; disallowed # NA .. +0CBC..0CBD ; valid # 4.0 KANNADA SIGN NUKTA..KANNADA SIGN AVAGRAHA +0CBE..0CC4 ; valid # 1.1 KANNADA VOWEL SIGN AA..KANNADA VOWEL SIGN VOCALIC RR +0CC5 ; disallowed # NA +0CC6..0CC8 ; valid # 1.1 KANNADA VOWEL SIGN E..KANNADA VOWEL SIGN AI +0CC9 ; disallowed # NA +0CCA..0CCD ; valid # 1.1 KANNADA VOWEL SIGN O..KANNADA SIGN VIRAMA +0CCE..0CD4 ; disallowed # NA .. +0CD5..0CD6 ; valid # 1.1 KANNADA LENGTH MARK..KANNADA AI LENGTH MARK +0CD7..0CDD ; disallowed # NA .. +0CDE ; valid # 1.1 KANNADA LETTER FA +0CDF ; disallowed # NA +0CE0..0CE1 ; valid # 1.1 KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL +0CE2..0CE3 ; valid # 5.0 KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL +0CE4..0CE5 ; disallowed # NA .. +0CE6..0CEF ; valid # 1.1 KANNADA DIGIT ZERO..KANNADA DIGIT NINE +0CF0 ; disallowed # NA +0CF1..0CF2 ; valid # 5.0 KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA +0CF3..0D00 ; disallowed # NA .. +0D01 ; valid # 7.0 MALAYALAM SIGN CANDRABINDU +0D02..0D03 ; valid # 1.1 MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA +0D04 ; disallowed # NA +0D05..0D0C ; valid # 1.1 MALAYALAM LETTER A..MALAYALAM LETTER VOCALIC L +0D0D ; disallowed # NA +0D0E..0D10 ; valid # 1.1 MALAYALAM LETTER E..MALAYALAM LETTER AI +0D11 ; disallowed # NA +0D12..0D28 ; valid # 1.1 MALAYALAM LETTER O..MALAYALAM LETTER NA +0D29 ; valid # 6.0 MALAYALAM LETTER NNNA +0D2A..0D39 ; valid # 1.1 MALAYALAM LETTER PA..MALAYALAM LETTER HA +0D3A ; valid # 6.0 MALAYALAM LETTER TTTA +0D3B..0D3C ; disallowed # NA .. +0D3D ; valid # 5.1 MALAYALAM SIGN AVAGRAHA +0D3E..0D43 ; valid # 1.1 MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN VOCALIC R +0D44 ; valid # 5.1 MALAYALAM VOWEL SIGN VOCALIC RR +0D45 ; disallowed # NA +0D46..0D48 ; valid # 1.1 MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI +0D49 ; disallowed # NA +0D4A..0D4D ; valid # 1.1 MALAYALAM VOWEL SIGN O..MALAYALAM SIGN VIRAMA +0D4E ; valid # 6.0 MALAYALAM LETTER DOT REPH +0D4F ; valid ; ; NV8 # 9.0 MALAYALAM SIGN PARA +0D50..0D53 ; disallowed # NA .. +0D54..0D56 ; valid # 9.0 MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL +0D57 ; valid # 1.1 MALAYALAM AU LENGTH MARK +0D58..0D5E ; valid ; ; NV8 # 9.0 MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH +0D5F ; valid # 8.0 MALAYALAM LETTER ARCHAIC II +0D60..0D61 ; valid # 1.1 MALAYALAM LETTER VOCALIC RR..MALAYALAM LETTER VOCALIC LL +0D62..0D63 ; valid # 5.1 MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL +0D64..0D65 ; disallowed # NA .. 
+0D66..0D6F ; valid # 1.1 MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE +0D70..0D75 ; valid ; ; NV8 # 5.1 MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE QUARTERS +0D76..0D78 ; valid ; ; NV8 # 9.0 MALAYALAM FRACTION ONE SIXTEENTH..MALAYALAM FRACTION THREE SIXTEENTHS +0D79 ; valid ; ; NV8 # 5.1 MALAYALAM DATE MARK +0D7A..0D7F ; valid # 5.1 MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K +0D80..0D81 ; disallowed # NA .. +0D82..0D83 ; valid # 3.0 SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA +0D84 ; disallowed # NA +0D85..0D96 ; valid # 3.0 SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA +0D97..0D99 ; disallowed # NA .. +0D9A..0DB1 ; valid # 3.0 SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA +0DB2 ; disallowed # NA +0DB3..0DBB ; valid # 3.0 SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA +0DBC ; disallowed # NA +0DBD ; valid # 3.0 SINHALA LETTER DANTAJA LAYANNA +0DBE..0DBF ; disallowed # NA .. +0DC0..0DC6 ; valid # 3.0 SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA +0DC7..0DC9 ; disallowed # NA .. +0DCA ; valid # 3.0 SINHALA SIGN AL-LAKUNA +0DCB..0DCE ; disallowed # NA .. +0DCF..0DD4 ; valid # 3.0 SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA +0DD5 ; disallowed # NA +0DD6 ; valid # 3.0 SINHALA VOWEL SIGN DIGA PAA-PILLA +0DD7 ; disallowed # NA +0DD8..0DDF ; valid # 3.0 SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA +0DE0..0DE5 ; disallowed # NA .. +0DE6..0DEF ; valid # 7.0 SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE +0DF0..0DF1 ; disallowed # NA .. +0DF2..0DF3 ; valid # 3.0 SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA +0DF4 ; valid ; ; NV8 # 3.0 SINHALA PUNCTUATION KUNDDALIYA +0DF5..0E00 ; disallowed # NA .. +0E01..0E32 ; valid # 1.1 THAI CHARACTER KO KAI..THAI CHARACTER SARA AA +0E33 ; mapped ; 0E4D 0E32 # 1.1 THAI CHARACTER SARA AM +0E34..0E3A ; valid # 1.1 THAI CHARACTER SARA I..THAI CHARACTER PHINTHU +0E3B..0E3E ; disallowed # NA .. +0E3F ; valid ; ; NV8 # 1.1 THAI CURRENCY SYMBOL BAHT +0E40..0E4E ; valid # 1.1 THAI CHARACTER SARA E..THAI CHARACTER YAMAKKAN +0E4F ; valid ; ; NV8 # 1.1 THAI CHARACTER FONGMAN +0E50..0E59 ; valid # 1.1 THAI DIGIT ZERO..THAI DIGIT NINE +0E5A..0E5B ; valid ; ; NV8 # 1.1 THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT +0E5C..0E80 ; disallowed # NA .. +0E81..0E82 ; valid # 1.1 LAO LETTER KO..LAO LETTER KHO SUNG +0E83 ; disallowed # NA +0E84 ; valid # 1.1 LAO LETTER KHO TAM +0E85..0E86 ; disallowed # NA .. +0E87..0E88 ; valid # 1.1 LAO LETTER NGO..LAO LETTER CO +0E89 ; disallowed # NA +0E8A ; valid # 1.1 LAO LETTER SO TAM +0E8B..0E8C ; disallowed # NA .. +0E8D ; valid # 1.1 LAO LETTER NYO +0E8E..0E93 ; disallowed # NA .. +0E94..0E97 ; valid # 1.1 LAO LETTER DO..LAO LETTER THO TAM +0E98 ; disallowed # NA +0E99..0E9F ; valid # 1.1 LAO LETTER NO..LAO LETTER FO SUNG +0EA0 ; disallowed # NA +0EA1..0EA3 ; valid # 1.1 LAO LETTER MO..LAO LETTER LO LING +0EA4 ; disallowed # NA +0EA5 ; valid # 1.1 LAO LETTER LO LOOT +0EA6 ; disallowed # NA +0EA7 ; valid # 1.1 LAO LETTER WO +0EA8..0EA9 ; disallowed # NA .. +0EAA..0EAB ; valid # 1.1 LAO LETTER SO SUNG..LAO LETTER HO SUNG +0EAC ; disallowed # NA +0EAD..0EB2 ; valid # 1.1 LAO LETTER O..LAO VOWEL SIGN AA +0EB3 ; mapped ; 0ECD 0EB2 # 1.1 LAO VOWEL SIGN AM +0EB4..0EB9 ; valid # 1.1 LAO VOWEL SIGN I..LAO VOWEL SIGN UU +0EBA ; disallowed # NA +0EBB..0EBD ; valid # 1.1 LAO VOWEL SIGN MAI KON..LAO SEMIVOWEL SIGN NYO +0EBE..0EBF ; disallowed # NA .. 
+0EC0..0EC4 ; valid # 1.1 LAO VOWEL SIGN E..LAO VOWEL SIGN AI +0EC5 ; disallowed # NA +0EC6 ; valid # 1.1 LAO KO LA +0EC7 ; disallowed # NA +0EC8..0ECD ; valid # 1.1 LAO TONE MAI EK..LAO NIGGAHITA +0ECE..0ECF ; disallowed # NA .. +0ED0..0ED9 ; valid # 1.1 LAO DIGIT ZERO..LAO DIGIT NINE +0EDA..0EDB ; disallowed # NA .. +0EDC ; mapped ; 0EAB 0E99 # 1.1 LAO HO NO +0EDD ; mapped ; 0EAB 0EA1 # 1.1 LAO HO MO +0EDE..0EDF ; valid # 6.1 LAO LETTER KHMU GO..LAO LETTER KHMU NYO +0EE0..0EFF ; disallowed # NA .. +0F00 ; valid # 2.0 TIBETAN SYLLABLE OM +0F01..0F0A ; valid ; ; NV8 # 2.0 TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK BKA- SHOG YIG MGO +0F0B ; valid # 2.0 TIBETAN MARK INTERSYLLABIC TSHEG +0F0C ; mapped ; 0F0B # 2.0 TIBETAN MARK DELIMITER TSHEG BSTAR +0F0D..0F17 ; valid ; ; NV8 # 2.0 TIBETAN MARK SHAD..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS +0F18..0F19 ; valid # 2.0 TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS +0F1A..0F1F ; valid ; ; NV8 # 2.0 TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG +0F20..0F29 ; valid # 2.0 TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE +0F2A..0F34 ; valid ; ; NV8 # 2.0 TIBETAN DIGIT HALF ONE..TIBETAN MARK BSDUS RTAGS +0F35 ; valid # 2.0 TIBETAN MARK NGAS BZUNG NYI ZLA +0F36 ; valid ; ; NV8 # 2.0 TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN +0F37 ; valid # 2.0 TIBETAN MARK NGAS BZUNG SGOR RTAGS +0F38 ; valid ; ; NV8 # 2.0 TIBETAN MARK CHE MGO +0F39 ; valid # 2.0 TIBETAN MARK TSA -PHRU +0F3A..0F3D ; valid ; ; NV8 # 2.0 TIBETAN MARK GUG RTAGS GYON..TIBETAN MARK ANG KHANG GYAS +0F3E..0F42 ; valid # 2.0 TIBETAN SIGN YAR TSHES..TIBETAN LETTER GA +0F43 ; mapped ; 0F42 0FB7 # 2.0 TIBETAN LETTER GHA +0F44..0F47 ; valid # 2.0 TIBETAN LETTER NGA..TIBETAN LETTER JA +0F48 ; disallowed # NA +0F49..0F4C ; valid # 2.0 TIBETAN LETTER NYA..TIBETAN LETTER DDA +0F4D ; mapped ; 0F4C 0FB7 # 2.0 TIBETAN LETTER DDHA +0F4E..0F51 ; valid # 2.0 TIBETAN LETTER NNA..TIBETAN LETTER DA +0F52 ; mapped ; 0F51 0FB7 # 2.0 TIBETAN LETTER DHA +0F53..0F56 ; valid # 2.0 TIBETAN LETTER NA..TIBETAN LETTER BA +0F57 ; mapped ; 0F56 0FB7 # 2.0 TIBETAN LETTER BHA +0F58..0F5B ; valid # 2.0 TIBETAN LETTER MA..TIBETAN LETTER DZA +0F5C ; mapped ; 0F5B 0FB7 # 2.0 TIBETAN LETTER DZHA +0F5D..0F68 ; valid # 2.0 TIBETAN LETTER WA..TIBETAN LETTER A +0F69 ; mapped ; 0F40 0FB5 # 2.0 TIBETAN LETTER KSSA +0F6A ; valid # 3.0 TIBETAN LETTER FIXED-FORM RA +0F6B..0F6C ; valid # 5.1 TIBETAN LETTER KKA..TIBETAN LETTER RRA +0F6D..0F70 ; disallowed # NA .. 
+0F71..0F72 ; valid # 2.0 TIBETAN VOWEL SIGN AA..TIBETAN VOWEL SIGN I +0F73 ; mapped ; 0F71 0F72 # 2.0 TIBETAN VOWEL SIGN II +0F74 ; valid # 2.0 TIBETAN VOWEL SIGN U +0F75 ; mapped ; 0F71 0F74 # 2.0 TIBETAN VOWEL SIGN UU +0F76 ; mapped ; 0FB2 0F80 # 2.0 TIBETAN VOWEL SIGN VOCALIC R +0F77 ; mapped ; 0FB2 0F71 0F80 #2.0 TIBETAN VOWEL SIGN VOCALIC RR +0F78 ; mapped ; 0FB3 0F80 # 2.0 TIBETAN VOWEL SIGN VOCALIC L +0F79 ; mapped ; 0FB3 0F71 0F80 #2.0 TIBETAN VOWEL SIGN VOCALIC LL +0F7A..0F80 ; valid # 2.0 TIBETAN VOWEL SIGN E..TIBETAN VOWEL SIGN REVERSED I +0F81 ; mapped ; 0F71 0F80 # 2.0 TIBETAN VOWEL SIGN REVERSED II +0F82..0F84 ; valid # 2.0 TIBETAN SIGN NYI ZLA NAA DA..TIBETAN MARK HALANTA +0F85 ; valid ; ; NV8 # 2.0 TIBETAN MARK PALUTA +0F86..0F8B ; valid # 2.0 TIBETAN SIGN LCI RTAGS..TIBETAN SIGN GRU MED RGYINGS +0F8C..0F8F ; valid # 6.0 TIBETAN SIGN INVERTED MCHU CAN..TIBETAN SUBJOINED SIGN INVERTED MCHU CAN +0F90..0F92 ; valid # 2.0 TIBETAN SUBJOINED LETTER KA..TIBETAN SUBJOINED LETTER GA +0F93 ; mapped ; 0F92 0FB7 # 2.0 TIBETAN SUBJOINED LETTER GHA +0F94..0F95 ; valid # 2.0 TIBETAN SUBJOINED LETTER NGA..TIBETAN SUBJOINED LETTER CA +0F96 ; valid # 3.0 TIBETAN SUBJOINED LETTER CHA +0F97 ; valid # 2.0 TIBETAN SUBJOINED LETTER JA +0F98 ; disallowed # NA +0F99..0F9C ; valid # 2.0 TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER DDA +0F9D ; mapped ; 0F9C 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DDHA +0F9E..0FA1 ; valid # 2.0 TIBETAN SUBJOINED LETTER NNA..TIBETAN SUBJOINED LETTER DA +0FA2 ; mapped ; 0FA1 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DHA +0FA3..0FA6 ; valid # 2.0 TIBETAN SUBJOINED LETTER NA..TIBETAN SUBJOINED LETTER BA +0FA7 ; mapped ; 0FA6 0FB7 # 2.0 TIBETAN SUBJOINED LETTER BHA +0FA8..0FAB ; valid # 2.0 TIBETAN SUBJOINED LETTER MA..TIBETAN SUBJOINED LETTER DZA +0FAC ; mapped ; 0FAB 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DZHA +0FAD ; valid # 2.0 TIBETAN SUBJOINED LETTER WA +0FAE..0FB0 ; valid # 3.0 TIBETAN SUBJOINED LETTER ZHA..TIBETAN SUBJOINED LETTER -A +0FB1..0FB7 ; valid # 2.0 TIBETAN SUBJOINED LETTER YA..TIBETAN SUBJOINED LETTER HA +0FB8 ; valid # 3.0 TIBETAN SUBJOINED LETTER A +0FB9 ; mapped ; 0F90 0FB5 # 2.0 TIBETAN SUBJOINED LETTER KSSA +0FBA..0FBC ; valid # 3.0 TIBETAN SUBJOINED LETTER FIXED-FORM WA..TIBETAN SUBJOINED LETTER FIXED-FORM RA +0FBD ; disallowed # NA +0FBE..0FC5 ; valid ; ; NV8 # 3.0 TIBETAN KU RU KHA..TIBETAN SYMBOL RDO RJE +0FC6 ; valid # 3.0 TIBETAN SYMBOL PADMA GDAN +0FC7..0FCC ; valid ; ; NV8 # 3.0 TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL +0FCD ; disallowed # NA +0FCE ; valid ; ; NV8 # 5.1 TIBETAN SIGN RDEL NAG RDEL DKAR +0FCF ; valid ; ; NV8 # 3.0 TIBETAN SIGN RDEL NAG GSUM +0FD0..0FD1 ; valid ; ; NV8 # 4.1 TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK MNYAM YIG GI MGO RGYAN +0FD2..0FD4 ; valid ; ; NV8 # 5.1 TIBETAN MARK NYIS TSHEG..TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA +0FD5..0FD8 ; valid ; ; NV8 # 5.2 RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS +0FD9..0FDA ; valid ; ; NV8 # 6.0 TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS +0FDB..0FFF ; disallowed # NA .. 
+1000..1021 ; valid # 3.0 MYANMAR LETTER KA..MYANMAR LETTER A +1022 ; valid # 5.1 MYANMAR LETTER SHAN A +1023..1027 ; valid # 3.0 MYANMAR LETTER I..MYANMAR LETTER E +1028 ; valid # 5.1 MYANMAR LETTER MON E +1029..102A ; valid # 3.0 MYANMAR LETTER O..MYANMAR LETTER AU +102B ; valid # 5.1 MYANMAR VOWEL SIGN TALL AA +102C..1032 ; valid # 3.0 MYANMAR VOWEL SIGN AA..MYANMAR VOWEL SIGN AI +1033..1035 ; valid # 5.1 MYANMAR VOWEL SIGN MON II..MYANMAR VOWEL SIGN E ABOVE +1036..1039 ; valid # 3.0 MYANMAR SIGN ANUSVARA..MYANMAR SIGN VIRAMA +103A..103F ; valid # 5.1 MYANMAR SIGN ASAT..MYANMAR LETTER GREAT SA +1040..1049 ; valid # 3.0 MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE +104A..104F ; valid ; ; NV8 # 3.0 MYANMAR SIGN LITTLE SECTION..MYANMAR SYMBOL GENITIVE +1050..1059 ; valid # 3.0 MYANMAR LETTER SHA..MYANMAR VOWEL SIGN VOCALIC LL +105A..1099 ; valid # 5.1 MYANMAR LETTER MON NGA..MYANMAR SHAN DIGIT NINE +109A..109D ; valid # 5.2 MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON AI +109E..109F ; valid ; ; NV8 # 5.1 MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION +10A0..10C5 ; disallowed # 1.1 GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE +10C6 ; disallowed # NA +10C7 ; mapped ; 2D27 # 6.1 GEORGIAN CAPITAL LETTER YN +10C8..10CC ; disallowed # NA .. +10CD ; mapped ; 2D2D # 6.1 GEORGIAN CAPITAL LETTER AEN +10CE..10CF ; disallowed # NA .. +10D0..10F6 ; valid # 1.1 GEORGIAN LETTER AN..GEORGIAN LETTER FI +10F7..10F8 ; valid # 3.2 GEORGIAN LETTER YN..GEORGIAN LETTER ELIFI +10F9..10FA ; valid # 4.1 GEORGIAN LETTER TURNED GAN..GEORGIAN LETTER AIN +10FB ; valid ; ; NV8 # 1.1 GEORGIAN PARAGRAPH SEPARATOR +10FC ; mapped ; 10DC # 4.1 MODIFIER LETTER GEORGIAN NAR +10FD..10FF ; valid # 6.1 GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN +1100..1159 ; valid ; ; NV8 # 1.1 HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG YEORINHIEUH +115A..115E ; valid ; ; NV8 # 5.2 HANGUL CHOSEONG KIYEOK-TIKEUT..HANGUL CHOSEONG TIKEUT-RIEUL +115F..1160 ; disallowed # 1.1 HANGUL CHOSEONG FILLER..HANGUL JUNGSEONG FILLER +1161..11A2 ; valid ; ; NV8 # 1.1 HANGUL JUNGSEONG A..HANGUL JUNGSEONG SSANGARAEA +11A3..11A7 ; valid ; ; NV8 # 5.2 HANGUL JUNGSEONG A-EU..HANGUL JUNGSEONG O-YAE +11A8..11F9 ; valid ; ; NV8 # 1.1 HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG YEORINHIEUH +11FA..11FF ; valid ; ; NV8 # 5.2 HANGUL JONGSEONG KIYEOK-NIEUN..HANGUL JONGSEONG SSANGNIEUN +1200..1206 ; valid # 3.0 ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE HO +1207 ; valid # 4.1 ETHIOPIC SYLLABLE HOA +1208..1246 ; valid # 3.0 ETHIOPIC SYLLABLE LA..ETHIOPIC SYLLABLE QO +1247 ; valid # 4.1 ETHIOPIC SYLLABLE QOA +1248 ; valid # 3.0 ETHIOPIC SYLLABLE QWA +1249 ; disallowed # NA +124A..124D ; valid # 3.0 ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE +124E..124F ; disallowed # NA .. +1250..1256 ; valid # 3.0 ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO +1257 ; disallowed # NA +1258 ; valid # 3.0 ETHIOPIC SYLLABLE QHWA +1259 ; disallowed # NA +125A..125D ; valid # 3.0 ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE +125E..125F ; disallowed # NA .. +1260..1286 ; valid # 3.0 ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XO +1287 ; valid # 4.1 ETHIOPIC SYLLABLE XOA +1288 ; valid # 3.0 ETHIOPIC SYLLABLE XWA +1289 ; disallowed # NA +128A..128D ; valid # 3.0 ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE +128E..128F ; disallowed # NA .. 
+1290..12AE ; valid # 3.0 ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KO +12AF ; valid # 4.1 ETHIOPIC SYLLABLE KOA +12B0 ; valid # 3.0 ETHIOPIC SYLLABLE KWA +12B1 ; disallowed # NA +12B2..12B5 ; valid # 3.0 ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE +12B6..12B7 ; disallowed # NA .. +12B8..12BE ; valid # 3.0 ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO +12BF ; disallowed # NA +12C0 ; valid # 3.0 ETHIOPIC SYLLABLE KXWA +12C1 ; disallowed # NA +12C2..12C5 ; valid # 3.0 ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE +12C6..12C7 ; disallowed # NA .. +12C8..12CE ; valid # 3.0 ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE WO +12CF ; valid # 4.1 ETHIOPIC SYLLABLE WOA +12D0..12D6 ; valid # 3.0 ETHIOPIC SYLLABLE PHARYNGEAL A..ETHIOPIC SYLLABLE PHARYNGEAL O +12D7 ; disallowed # NA +12D8..12EE ; valid # 3.0 ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE YO +12EF ; valid # 4.1 ETHIOPIC SYLLABLE YOA +12F0..130E ; valid # 3.0 ETHIOPIC SYLLABLE DA..ETHIOPIC SYLLABLE GO +130F ; valid # 4.1 ETHIOPIC SYLLABLE GOA +1310 ; valid # 3.0 ETHIOPIC SYLLABLE GWA +1311 ; disallowed # NA +1312..1315 ; valid # 3.0 ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE +1316..1317 ; disallowed # NA .. +1318..131E ; valid # 3.0 ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE GGO +131F ; valid # 4.1 ETHIOPIC SYLLABLE GGWAA +1320..1346 ; valid # 3.0 ETHIOPIC SYLLABLE THA..ETHIOPIC SYLLABLE TZO +1347 ; valid # 4.1 ETHIOPIC SYLLABLE TZOA +1348..135A ; valid # 3.0 ETHIOPIC SYLLABLE FA..ETHIOPIC SYLLABLE FYA +135B..135C ; disallowed # NA .. +135D..135E ; valid # 6.0 ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING VOWEL LENGTH MARK +135F ; valid # 4.1 ETHIOPIC COMBINING GEMINATION MARK +1360 ; valid ; ; NV8 # 4.1 ETHIOPIC SECTION MARK +1361..137C ; valid ; ; NV8 # 3.0 ETHIOPIC WORDSPACE..ETHIOPIC NUMBER TEN THOUSAND +137D..137F ; disallowed # NA .. +1380..138F ; valid # 4.1 ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE +1390..1399 ; valid ; ; NV8 # 4.1 ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT +139A..139F ; disallowed # NA .. +13A0..13F4 ; valid # 3.0 CHEROKEE LETTER A..CHEROKEE LETTER YV +13F5 ; valid # 8.0 CHEROKEE LETTER MV +13F6..13F7 ; disallowed # NA .. +13F8 ; mapped ; 13F0 # 8.0 CHEROKEE SMALL LETTER YE +13F9 ; mapped ; 13F1 # 8.0 CHEROKEE SMALL LETTER YI +13FA ; mapped ; 13F2 # 8.0 CHEROKEE SMALL LETTER YO +13FB ; mapped ; 13F3 # 8.0 CHEROKEE SMALL LETTER YU +13FC ; mapped ; 13F4 # 8.0 CHEROKEE SMALL LETTER YV +13FD ; mapped ; 13F5 # 8.0 CHEROKEE SMALL LETTER MV +13FE..13FF ; disallowed # NA .. +1400 ; valid ; ; NV8 # 5.2 CANADIAN SYLLABICS HYPHEN +1401..166C ; valid # 3.0 CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA +166D..166E ; valid ; ; NV8 # 3.0 CANADIAN SYLLABICS CHI SIGN..CANADIAN SYLLABICS FULL STOP +166F..1676 ; valid # 3.0 CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS NNGAA +1677..167F ; valid # 5.2 CANADIAN SYLLABICS WOODS-CREE THWEE..CANADIAN SYLLABICS BLACKFOOT W +1680 ; disallowed # 3.0 OGHAM SPACE MARK +1681..169A ; valid # 3.0 OGHAM LETTER BEITH..OGHAM LETTER PEITH +169B..169C ; valid ; ; NV8 # 3.0 OGHAM FEATHER MARK..OGHAM REVERSED FEATHER MARK +169D..169F ; disallowed # NA .. +16A0..16EA ; valid # 3.0 RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X +16EB..16F0 ; valid ; ; NV8 # 3.0 RUNIC SINGLE PUNCTUATION..RUNIC BELGTHOR SYMBOL +16F1..16F8 ; valid # 7.0 RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC +16F9..16FF ; disallowed # NA .. 
+1700..170C ; valid # 3.2 TAGALOG LETTER A..TAGALOG LETTER YA +170D ; disallowed # NA +170E..1714 ; valid # 3.2 TAGALOG LETTER LA..TAGALOG SIGN VIRAMA +1715..171F ; disallowed # NA .. +1720..1734 ; valid # 3.2 HANUNOO LETTER A..HANUNOO SIGN PAMUDPOD +1735..1736 ; valid ; ; NV8 # 3.2 PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION +1737..173F ; disallowed # NA .. +1740..1753 ; valid # 3.2 BUHID LETTER A..BUHID VOWEL SIGN U +1754..175F ; disallowed # NA .. +1760..176C ; valid # 3.2 TAGBANWA LETTER A..TAGBANWA LETTER YA +176D ; disallowed # NA +176E..1770 ; valid # 3.2 TAGBANWA LETTER LA..TAGBANWA LETTER SA +1771 ; disallowed # NA +1772..1773 ; valid # 3.2 TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U +1774..177F ; disallowed # NA .. +1780..17B3 ; valid # 3.0 KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU +17B4..17B5 ; disallowed # 3.0 KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA +17B6..17D3 ; valid # 3.0 KHMER VOWEL SIGN AA..KHMER SIGN BATHAMASAT +17D4..17D6 ; valid ; ; NV8 # 3.0 KHMER SIGN KHAN..KHMER SIGN CAMNUC PII KUUH +17D7 ; valid # 3.0 KHMER SIGN LEK TOO +17D8..17DB ; valid ; ; NV8 # 3.0 KHMER SIGN BEYYAL..KHMER CURRENCY SYMBOL RIEL +17DC ; valid # 3.0 KHMER SIGN AVAKRAHASANYA +17DD ; valid # 4.0 KHMER SIGN ATTHACAN +17DE..17DF ; disallowed # NA .. +17E0..17E9 ; valid # 3.0 KHMER DIGIT ZERO..KHMER DIGIT NINE +17EA..17EF ; disallowed # NA .. +17F0..17F9 ; valid ; ; NV8 # 4.0 KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON +17FA..17FF ; disallowed # NA .. +1800..1805 ; valid ; ; NV8 # 3.0 MONGOLIAN BIRGA..MONGOLIAN FOUR DOTS +1806 ; disallowed # 3.0 MONGOLIAN TODO SOFT HYPHEN +1807..180A ; valid ; ; NV8 # 3.0 MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER..MONGOLIAN NIRUGU +180B..180D ; ignored # 3.0 MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE +180E ; disallowed # 3.0 MONGOLIAN VOWEL SEPARATOR +180F ; disallowed # NA +1810..1819 ; valid # 3.0 MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE +181A..181F ; disallowed # NA .. +1820..1877 ; valid # 3.0 MONGOLIAN LETTER A..MONGOLIAN LETTER MANCHU ZHA +1878..187F ; disallowed # NA .. +1880..18A9 ; valid # 3.0 MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI DAGALGA +18AA ; valid # 5.1 MONGOLIAN LETTER MANCHU ALI GALI LHA +18AB..18AF ; disallowed # NA .. +18B0..18F5 ; valid # 5.2 CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S +18F6..18FF ; disallowed # NA .. +1900..191C ; valid # 4.0 LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER HA +191D..191E ; valid # 7.0 LIMBU LETTER GYAN..LIMBU LETTER TRA +191F ; disallowed # NA +1920..192B ; valid # 4.0 LIMBU VOWEL SIGN A..LIMBU SUBJOINED LETTER WA +192C..192F ; disallowed # NA .. +1930..193B ; valid # 4.0 LIMBU SMALL LETTER KA..LIMBU SIGN SA-I +193C..193F ; disallowed # NA .. +1940 ; valid ; ; NV8 # 4.0 LIMBU SIGN LOO +1941..1943 ; disallowed # NA .. +1944..1945 ; valid ; ; NV8 # 4.0 LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK +1946..196D ; valid # 4.0 LIMBU DIGIT ZERO..TAI LE LETTER AI +196E..196F ; disallowed # NA .. +1970..1974 ; valid # 4.0 TAI LE LETTER TONE-2..TAI LE LETTER TONE-6 +1975..197F ; disallowed # NA .. +1980..19A9 ; valid # 4.1 NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW XVA +19AA..19AB ; valid # 5.2 NEW TAI LUE LETTER HIGH SUA..NEW TAI LUE LETTER LOW SUA +19AC..19AF ; disallowed # NA .. +19B0..19C9 ; valid # 4.1 NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2 +19CA..19CF ; disallowed # NA .. 
+19D0..19D9 ; valid # 4.1 NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE +19DA ; valid ; ; XV8 # 5.2 NEW TAI LUE THAM DIGIT ONE +19DB..19DD ; disallowed # NA .. +19DE..19DF ; valid ; ; NV8 # 4.1 NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV +19E0..19FF ; valid ; ; NV8 # 4.0 KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC +1A00..1A1B ; valid # 4.1 BUGINESE LETTER KA..BUGINESE VOWEL SIGN AE +1A1C..1A1D ; disallowed # NA .. +1A1E..1A1F ; valid ; ; NV8 # 4.1 BUGINESE PALLAWA..BUGINESE END OF SECTION +1A20..1A5E ; valid # 5.2 TAI THAM LETTER HIGH KA..TAI THAM CONSONANT SIGN SA +1A5F ; disallowed # NA +1A60..1A7C ; valid # 5.2 TAI THAM SIGN SAKOT..TAI THAM SIGN KHUEN-LUE KARAN +1A7D..1A7E ; disallowed # NA .. +1A7F..1A89 ; valid # 5.2 TAI THAM COMBINING CRYPTOGRAMMIC DOT..TAI THAM HORA DIGIT NINE +1A8A..1A8F ; disallowed # NA .. +1A90..1A99 ; valid # 5.2 TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE +1A9A..1A9F ; disallowed # NA .. +1AA0..1AA6 ; valid ; ; NV8 # 5.2 TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA +1AA7 ; valid # 5.2 TAI THAM SIGN MAI YAMOK +1AA8..1AAD ; valid ; ; NV8 # 5.2 TAI THAM SIGN KAAN..TAI THAM SIGN CAANG +1AAE..1AAF ; disallowed # NA .. +1AB0..1ABD ; valid # 7.0 COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW +1ABE ; valid ; ; NV8 # 7.0 COMBINING PARENTHESES OVERLAY +1ABF..1AFF ; disallowed # NA .. +1B00..1B4B ; valid # 5.0 BALINESE SIGN ULU RICEM..BALINESE LETTER ASYURA SASAK +1B4C..1B4F ; disallowed # NA .. +1B50..1B59 ; valid # 5.0 BALINESE DIGIT ZERO..BALINESE DIGIT NINE +1B5A..1B6A ; valid ; ; NV8 # 5.0 BALINESE PANTI..BALINESE MUSICAL SYMBOL DANG GEDE +1B6B..1B73 ; valid # 5.0 BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG +1B74..1B7C ; valid ; ; NV8 # 5.0 BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING +1B7D..1B7F ; disallowed # NA .. +1B80..1BAA ; valid # 5.1 SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PAMAAEH +1BAB..1BAD ; valid # 6.1 SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA +1BAE..1BB9 ; valid # 5.1 SUNDANESE LETTER KHA..SUNDANESE DIGIT NINE +1BBA..1BBF ; valid # 6.1 SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M +1BC0..1BF3 ; valid # 6.0 BATAK LETTER A..BATAK PANONGONAN +1BF4..1BFB ; disallowed # NA .. +1BFC..1BFF ; valid ; ; NV8 # 6.0 BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT +1C00..1C37 ; valid # 5.1 LEPCHA LETTER KA..LEPCHA SIGN NUKTA +1C38..1C3A ; disallowed # NA .. +1C3B..1C3F ; valid ; ; NV8 # 5.1 LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK +1C40..1C49 ; valid # 5.1 LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE +1C4A..1C4C ; disallowed # NA .. +1C4D..1C7D ; valid # 5.1 LEPCHA LETTER TTA..OL CHIKI AHAD +1C7E..1C7F ; valid ; ; NV8 # 5.1 OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD +1C80 ; mapped ; 0432 # 9.0 CYRILLIC SMALL LETTER ROUNDED VE +1C81 ; mapped ; 0434 # 9.0 CYRILLIC SMALL LETTER LONG-LEGGED DE +1C82 ; mapped ; 043E # 9.0 CYRILLIC SMALL LETTER NARROW O +1C83 ; mapped ; 0441 # 9.0 CYRILLIC SMALL LETTER WIDE ES +1C84..1C85 ; mapped ; 0442 # 9.0 CYRILLIC SMALL LETTER TALL TE..CYRILLIC SMALL LETTER THREE-LEGGED TE +1C86 ; mapped ; 044A # 9.0 CYRILLIC SMALL LETTER TALL HARD SIGN +1C87 ; mapped ; 0463 # 9.0 CYRILLIC SMALL LETTER TALL YAT +1C88 ; mapped ; A64B # 9.0 CYRILLIC SMALL LETTER UNBLENDED UK +1C89..1CBF ; disallowed # NA .. +1CC0..1CC7 ; valid ; ; NV8 # 6.1 SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA +1CC8..1CCF ; disallowed # NA .. 
+1CD0..1CD2 ; valid # 5.2 VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+1CD3 ; valid ; ; NV8 # 5.2 VEDIC SIGN NIHSHVASA
+1CD4..1CF2 ; valid # 5.2 VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC SIGN ARDHAVISARGA
+1CF3..1CF6 ; valid # 6.1 VEDIC SIGN ROTATED ARDHAVISARGA..VEDIC SIGN UPADHMANIYA
+1CF7 ; disallowed # NA
+1CF8..1CF9 ; valid # 7.0 VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+1CFA..1CFF ; disallowed # NA ..
+1D00..1D2B ; valid # 4.0 LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL
+1D2C ; mapped ; 0061 # 4.0 MODIFIER LETTER CAPITAL A
+1D2D ; mapped ; 00E6 # 4.0 MODIFIER LETTER CAPITAL AE
+1D2E ; mapped ; 0062 # 4.0 MODIFIER LETTER CAPITAL B
+1D2F ; valid # 4.0 MODIFIER LETTER CAPITAL BARRED B
+1D30 ; mapped ; 0064 # 4.0 MODIFIER LETTER CAPITAL D
+1D31 ; mapped ; 0065 # 4.0 MODIFIER LETTER CAPITAL E
+1D32 ; mapped ; 01DD # 4.0 MODIFIER LETTER CAPITAL REVERSED E
+1D33 ; mapped ; 0067 # 4.0 MODIFIER LETTER CAPITAL G
+1D34 ; mapped ; 0068 # 4.0 MODIFIER LETTER CAPITAL H
+1D35 ; mapped ; 0069 # 4.0 MODIFIER LETTER CAPITAL I
+1D36 ; mapped ; 006A # 4.0 MODIFIER LETTER CAPITAL J
+1D37 ; mapped ; 006B # 4.0 MODIFIER LETTER CAPITAL K
+1D38 ; mapped ; 006C # 4.0 MODIFIER LETTER CAPITAL L
+1D39 ; mapped ; 006D # 4.0 MODIFIER LETTER CAPITAL M
+1D3A ; mapped ; 006E # 4.0 MODIFIER LETTER CAPITAL N
+1D3B ; valid # 4.0 MODIFIER LETTER CAPITAL REVERSED N
+1D3C ; mapped ; 006F # 4.0 MODIFIER LETTER CAPITAL O
+1D3D ; mapped ; 0223 # 4.0 MODIFIER LETTER CAPITAL OU
+1D3E ; mapped ; 0070 # 4.0 MODIFIER LETTER CAPITAL P
+1D3F ; mapped ; 0072 # 4.0 MODIFIER LETTER CAPITAL R
+1D40 ; mapped ; 0074 # 4.0 MODIFIER LETTER CAPITAL T
+1D41 ; mapped ; 0075 # 4.0 MODIFIER LETTER CAPITAL U
+1D42 ; mapped ; 0077 # 4.0 MODIFIER LETTER CAPITAL W
+1D43 ; mapped ; 0061 # 4.0 MODIFIER LETTER SMALL A
+1D44 ; mapped ; 0250 # 4.0 MODIFIER LETTER SMALL TURNED A
+1D45 ; mapped ; 0251 # 4.0 MODIFIER LETTER SMALL ALPHA
+1D46 ; mapped ; 1D02 # 4.0 MODIFIER LETTER SMALL TURNED AE
+1D47 ; mapped ; 0062 # 4.0 MODIFIER LETTER SMALL B
+1D48 ; mapped ; 0064 # 4.0 MODIFIER LETTER SMALL D
+1D49 ; mapped ; 0065 # 4.0 MODIFIER LETTER SMALL E
+1D4A ; mapped ; 0259 # 4.0 MODIFIER LETTER SMALL SCHWA
+1D4B ; mapped ; 025B # 4.0 MODIFIER LETTER SMALL OPEN E
+1D4C ; mapped ; 025C # 4.0 MODIFIER LETTER SMALL TURNED OPEN E
+1D4D ; mapped ; 0067 # 4.0 MODIFIER LETTER SMALL G
+1D4E ; valid # 4.0 MODIFIER LETTER SMALL TURNED I
+1D4F ; mapped ; 006B # 4.0 MODIFIER LETTER SMALL K
+1D50 ; mapped ; 006D # 4.0 MODIFIER LETTER SMALL M
+1D51 ; mapped ; 014B # 4.0 MODIFIER LETTER SMALL ENG
+1D52 ; mapped ; 006F # 4.0 MODIFIER LETTER SMALL O
+1D53 ; mapped ; 0254 # 4.0 MODIFIER LETTER SMALL OPEN O
+1D54 ; mapped ; 1D16 # 4.0 MODIFIER LETTER SMALL TOP HALF O
+1D55 ; mapped ; 1D17 # 4.0 MODIFIER LETTER SMALL BOTTOM HALF O
+1D56 ; mapped ; 0070 # 4.0 MODIFIER LETTER SMALL P
+1D57 ; mapped ; 0074 # 4.0 MODIFIER LETTER SMALL T
+1D58 ; mapped ; 0075 # 4.0 MODIFIER LETTER SMALL U
+1D59 ; mapped ; 1D1D # 4.0 MODIFIER LETTER SMALL SIDEWAYS U
+1D5A ; mapped ; 026F # 4.0 MODIFIER LETTER SMALL TURNED M
+1D5B ; mapped ; 0076 # 4.0 MODIFIER LETTER SMALL V
+1D5C ; mapped ; 1D25 # 4.0 MODIFIER LETTER SMALL AIN
+1D5D ; mapped ; 03B2 # 4.0 MODIFIER LETTER SMALL BETA
+1D5E ; mapped ; 03B3 # 4.0 MODIFIER LETTER SMALL GREEK GAMMA
+1D5F ; mapped ; 03B4 # 4.0 MODIFIER LETTER SMALL DELTA
+1D60 ; mapped ; 03C6 # 4.0 MODIFIER LETTER SMALL GREEK PHI
+1D61 ; mapped ; 03C7 # 4.0 MODIFIER LETTER SMALL CHI
+1D62 ; mapped ; 0069 # 4.0 LATIN SUBSCRIPT SMALL LETTER I
+1D63 ; mapped ; 0072 # 4.0 LATIN SUBSCRIPT SMALL LETTER R
+1D64 ; mapped ; 0075 # 4.0 LATIN SUBSCRIPT SMALL LETTER U
+1D65 ; mapped ; 0076 # 4.0 LATIN SUBSCRIPT SMALL LETTER V
+1D66 ; mapped ; 03B2 # 4.0 GREEK SUBSCRIPT SMALL LETTER BETA
+1D67 ; mapped ; 03B3 # 4.0 GREEK SUBSCRIPT SMALL LETTER GAMMA
+1D68 ; mapped ; 03C1 # 4.0 GREEK SUBSCRIPT SMALL LETTER RHO
+1D69 ; mapped ; 03C6 # 4.0 GREEK SUBSCRIPT SMALL LETTER PHI
+1D6A ; mapped ; 03C7 # 4.0 GREEK SUBSCRIPT SMALL LETTER CHI
+1D6B ; valid # 4.0 LATIN SMALL LETTER UE
+1D6C..1D77 ; valid # 4.1 LATIN SMALL LETTER B WITH MIDDLE TILDE..LATIN SMALL LETTER TURNED G
+1D78 ; mapped ; 043D # 4.1 MODIFIER LETTER CYRILLIC EN
+1D79..1D9A ; valid # 4.1 LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK
+1D9B ; mapped ; 0252 # 4.1 MODIFIER LETTER SMALL TURNED ALPHA
+1D9C ; mapped ; 0063 # 4.1 MODIFIER LETTER SMALL C
+1D9D ; mapped ; 0255 # 4.1 MODIFIER LETTER SMALL C WITH CURL
+1D9E ; mapped ; 00F0 # 4.1 MODIFIER LETTER SMALL ETH
+1D9F ; mapped ; 025C # 4.1 MODIFIER LETTER SMALL REVERSED OPEN E
+1DA0 ; mapped ; 0066 # 4.1 MODIFIER LETTER SMALL F
+1DA1 ; mapped ; 025F # 4.1 MODIFIER LETTER SMALL DOTLESS J WITH STROKE
+1DA2 ; mapped ; 0261 # 4.1 MODIFIER LETTER SMALL SCRIPT G
+1DA3 ; mapped ; 0265 # 4.1 MODIFIER LETTER SMALL TURNED H
+1DA4 ; mapped ; 0268 # 4.1 MODIFIER LETTER SMALL I WITH STROKE
+1DA5 ; mapped ; 0269 # 4.1 MODIFIER LETTER SMALL IOTA
+1DA6 ; mapped ; 026A # 4.1 MODIFIER LETTER SMALL CAPITAL I
+1DA7 ; mapped ; 1D7B # 4.1 MODIFIER LETTER SMALL CAPITAL I WITH STROKE
+1DA8 ; mapped ; 029D # 4.1 MODIFIER LETTER SMALL J WITH CROSSED-TAIL
+1DA9 ; mapped ; 026D # 4.1 MODIFIER LETTER SMALL L WITH RETROFLEX HOOK
+1DAA ; mapped ; 1D85 # 4.1 MODIFIER LETTER SMALL L WITH PALATAL HOOK
+1DAB ; mapped ; 029F # 4.1 MODIFIER LETTER SMALL CAPITAL L
+1DAC ; mapped ; 0271 # 4.1 MODIFIER LETTER SMALL M WITH HOOK
+1DAD ; mapped ; 0270 # 4.1 MODIFIER LETTER SMALL TURNED M WITH LONG LEG
+1DAE ; mapped ; 0272 # 4.1 MODIFIER LETTER SMALL N WITH LEFT HOOK
+1DAF ; mapped ; 0273 # 4.1 MODIFIER LETTER SMALL N WITH RETROFLEX HOOK
+1DB0 ; mapped ; 0274 # 4.1 MODIFIER LETTER SMALL CAPITAL N
+1DB1 ; mapped ; 0275 # 4.1 MODIFIER LETTER SMALL BARRED O
+1DB2 ; mapped ; 0278 # 4.1 MODIFIER LETTER SMALL PHI
+1DB3 ; mapped ; 0282 # 4.1 MODIFIER LETTER SMALL S WITH HOOK
+1DB4 ; mapped ; 0283 # 4.1 MODIFIER LETTER SMALL ESH
+1DB5 ; mapped ; 01AB # 4.1 MODIFIER LETTER SMALL T WITH PALATAL HOOK
+1DB6 ; mapped ; 0289 # 4.1 MODIFIER LETTER SMALL U BAR
+1DB7 ; mapped ; 028A # 4.1 MODIFIER LETTER SMALL UPSILON
+1DB8 ; mapped ; 1D1C # 4.1 MODIFIER LETTER SMALL CAPITAL U
+1DB9 ; mapped ; 028B # 4.1 MODIFIER LETTER SMALL V WITH HOOK
+1DBA ; mapped ; 028C # 4.1 MODIFIER LETTER SMALL TURNED V
+1DBB ; mapped ; 007A # 4.1 MODIFIER LETTER SMALL Z
+1DBC ; mapped ; 0290 # 4.1 MODIFIER LETTER SMALL Z WITH RETROFLEX HOOK
+1DBD ; mapped ; 0291 # 4.1 MODIFIER LETTER SMALL Z WITH CURL
+1DBE ; mapped ; 0292 # 4.1 MODIFIER LETTER SMALL EZH
+1DBF ; mapped ; 03B8 # 4.1 MODIFIER LETTER SMALL THETA
+1DC0..1DC3 ; valid # 4.1 COMBINING DOTTED GRAVE ACCENT..COMBINING SUSPENSION MARK
+1DC4..1DCA ; valid # 5.0 COMBINING MACRON-ACUTE..COMBINING LATIN SMALL LETTER R BELOW
+1DCB..1DE6 ; valid # 5.1 COMBINING BREVE-MACRON..COMBINING LATIN SMALL LETTER Z
+1DE7..1DF5 ; valid # 7.0 COMBINING LATIN SMALL LETTER ALPHA..COMBINING UP TACK ABOVE
+1DF6..1DFA ; disallowed # NA ..
+1DFB ; valid # 9.0 COMBINING DELETION MARK
+1DFC ; valid # 6.0 COMBINING DOUBLE INVERTED BREVE BELOW
+1DFD ; valid # 5.2 COMBINING ALMOST EQUAL TO BELOW
+1DFE..1DFF ; valid # 5.0 COMBINING LEFT ARROWHEAD ABOVE..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+1E00 ; mapped ; 1E01 # 1.1 LATIN CAPITAL LETTER A WITH RING BELOW
+1E01 ; valid # 1.1 LATIN SMALL LETTER A WITH RING BELOW
+1E02 ; mapped ; 1E03 # 1.1 LATIN CAPITAL LETTER B WITH DOT ABOVE
+1E03 ; valid # 1.1 LATIN SMALL LETTER B WITH DOT ABOVE
+1E04 ; mapped ; 1E05 # 1.1 LATIN CAPITAL LETTER B WITH DOT BELOW
+1E05 ; valid # 1.1 LATIN SMALL LETTER B WITH DOT BELOW
+1E06 ; mapped ; 1E07 # 1.1 LATIN CAPITAL LETTER B WITH LINE BELOW
+1E07 ; valid # 1.1 LATIN SMALL LETTER B WITH LINE BELOW
+1E08 ; mapped ; 1E09 # 1.1 LATIN CAPITAL LETTER C WITH CEDILLA AND ACUTE
+1E09 ; valid # 1.1 LATIN SMALL LETTER C WITH CEDILLA AND ACUTE
+1E0A ; mapped ; 1E0B # 1.1 LATIN CAPITAL LETTER D WITH DOT ABOVE
+1E0B ; valid # 1.1 LATIN SMALL LETTER D WITH DOT ABOVE
+1E0C ; mapped ; 1E0D # 1.1 LATIN CAPITAL LETTER D WITH DOT BELOW
+1E0D ; valid # 1.1 LATIN SMALL LETTER D WITH DOT BELOW
+1E0E ; mapped ; 1E0F # 1.1 LATIN CAPITAL LETTER D WITH LINE BELOW
+1E0F ; valid # 1.1 LATIN SMALL LETTER D WITH LINE BELOW
+1E10 ; mapped ; 1E11 # 1.1 LATIN CAPITAL LETTER D WITH CEDILLA
+1E11 ; valid # 1.1 LATIN SMALL LETTER D WITH CEDILLA
+1E12 ; mapped ; 1E13 # 1.1 LATIN CAPITAL LETTER D WITH CIRCUMFLEX BELOW
+1E13 ; valid # 1.1 LATIN SMALL LETTER D WITH CIRCUMFLEX BELOW
+1E14 ; mapped ; 1E15 # 1.1 LATIN CAPITAL LETTER E WITH MACRON AND GRAVE
+1E15 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON AND GRAVE
+1E16 ; mapped ; 1E17 # 1.1 LATIN CAPITAL LETTER E WITH MACRON AND ACUTE
+1E17 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON AND ACUTE
+1E18 ; mapped ; 1E19 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX BELOW
+1E19 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX BELOW
+1E1A ; mapped ; 1E1B # 1.1 LATIN CAPITAL LETTER E WITH TILDE BELOW
+1E1B ; valid # 1.1 LATIN SMALL LETTER E WITH TILDE BELOW
+1E1C ; mapped ; 1E1D # 1.1 LATIN CAPITAL LETTER E WITH CEDILLA AND BREVE
+1E1D ; valid # 1.1 LATIN SMALL LETTER E WITH CEDILLA AND BREVE
+1E1E ; mapped ; 1E1F # 1.1 LATIN CAPITAL LETTER F WITH DOT ABOVE
+1E1F ; valid # 1.1 LATIN SMALL LETTER F WITH DOT ABOVE
+1E20 ; mapped ; 1E21 # 1.1 LATIN CAPITAL LETTER G WITH MACRON
+1E21 ; valid # 1.1 LATIN SMALL LETTER G WITH MACRON
+1E22 ; mapped ; 1E23 # 1.1 LATIN CAPITAL LETTER H WITH DOT ABOVE
+1E23 ; valid # 1.1 LATIN SMALL LETTER H WITH DOT ABOVE
+1E24 ; mapped ; 1E25 # 1.1 LATIN CAPITAL LETTER H WITH DOT BELOW
+1E25 ; valid # 1.1 LATIN SMALL LETTER H WITH DOT BELOW
+1E26 ; mapped ; 1E27 # 1.1 LATIN CAPITAL LETTER H WITH DIAERESIS
+1E27 ; valid # 1.1 LATIN SMALL LETTER H WITH DIAERESIS
+1E28 ; mapped ; 1E29 # 1.1 LATIN CAPITAL LETTER H WITH CEDILLA
+1E29 ; valid # 1.1 LATIN SMALL LETTER H WITH CEDILLA
+1E2A ; mapped ; 1E2B # 1.1 LATIN CAPITAL LETTER H WITH BREVE BELOW
+1E2B ; valid # 1.1 LATIN SMALL LETTER H WITH BREVE BELOW
+1E2C ; mapped ; 1E2D # 1.1 LATIN CAPITAL LETTER I WITH TILDE BELOW
+1E2D ; valid # 1.1 LATIN SMALL LETTER I WITH TILDE BELOW
+1E2E ; mapped ; 1E2F # 1.1 LATIN CAPITAL LETTER I WITH DIAERESIS AND ACUTE
+1E2F ; valid # 1.1 LATIN SMALL LETTER I WITH DIAERESIS AND ACUTE
+1E30 ; mapped ; 1E31 # 1.1 LATIN CAPITAL LETTER K WITH ACUTE
+1E31 ; valid # 1.1 LATIN SMALL LETTER K WITH ACUTE
+1E32 ; mapped ; 1E33 # 1.1 LATIN CAPITAL LETTER K WITH DOT BELOW
+1E33 ; valid # 1.1 LATIN SMALL LETTER K WITH DOT BELOW
+1E34 ; mapped ; 1E35 # 1.1 LATIN CAPITAL LETTER K WITH LINE BELOW
+1E35 ; valid # 1.1 LATIN SMALL LETTER K WITH LINE BELOW
+1E36 ; mapped ; 1E37 # 1.1 LATIN CAPITAL LETTER L WITH DOT BELOW
+1E37 ; valid # 1.1 LATIN SMALL LETTER L WITH DOT BELOW
+1E38 ; mapped ; 1E39 # 1.1 LATIN CAPITAL LETTER L WITH DOT BELOW AND MACRON
+1E39 ; valid # 1.1 LATIN SMALL LETTER L WITH DOT BELOW AND MACRON
+1E3A ; mapped ; 1E3B # 1.1 LATIN CAPITAL LETTER L WITH LINE BELOW
+1E3B ; valid # 1.1 LATIN SMALL LETTER L WITH LINE BELOW
+1E3C ; mapped ; 1E3D # 1.1 LATIN CAPITAL LETTER L WITH CIRCUMFLEX BELOW
+1E3D ; valid # 1.1 LATIN SMALL LETTER L WITH CIRCUMFLEX BELOW
+1E3E ; mapped ; 1E3F # 1.1 LATIN CAPITAL LETTER M WITH ACUTE
+1E3F ; valid # 1.1 LATIN SMALL LETTER M WITH ACUTE
+1E40 ; mapped ; 1E41 # 1.1 LATIN CAPITAL LETTER M WITH DOT ABOVE
+1E41 ; valid # 1.1 LATIN SMALL LETTER M WITH DOT ABOVE
+1E42 ; mapped ; 1E43 # 1.1 LATIN CAPITAL LETTER M WITH DOT BELOW
+1E43 ; valid # 1.1 LATIN SMALL LETTER M WITH DOT BELOW
+1E44 ; mapped ; 1E45 # 1.1 LATIN CAPITAL LETTER N WITH DOT ABOVE
+1E45 ; valid # 1.1 LATIN SMALL LETTER N WITH DOT ABOVE
+1E46 ; mapped ; 1E47 # 1.1 LATIN CAPITAL LETTER N WITH DOT BELOW
+1E47 ; valid # 1.1 LATIN SMALL LETTER N WITH DOT BELOW
+1E48 ; mapped ; 1E49 # 1.1 LATIN CAPITAL LETTER N WITH LINE BELOW
+1E49 ; valid # 1.1 LATIN SMALL LETTER N WITH LINE BELOW
+1E4A ; mapped ; 1E4B # 1.1 LATIN CAPITAL LETTER N WITH CIRCUMFLEX BELOW
+1E4B ; valid # 1.1 LATIN SMALL LETTER N WITH CIRCUMFLEX BELOW
+1E4C ; mapped ; 1E4D # 1.1 LATIN CAPITAL LETTER O WITH TILDE AND ACUTE
+1E4D ; valid # 1.1 LATIN SMALL LETTER O WITH TILDE AND ACUTE
+1E4E ; mapped ; 1E4F # 1.1 LATIN CAPITAL LETTER O WITH TILDE AND DIAERESIS
+1E4F ; valid # 1.1 LATIN SMALL LETTER O WITH TILDE AND DIAERESIS
+1E50 ; mapped ; 1E51 # 1.1 LATIN CAPITAL LETTER O WITH MACRON AND GRAVE
+1E51 ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON AND GRAVE
+1E52 ; mapped ; 1E53 # 1.1 LATIN CAPITAL LETTER O WITH MACRON AND ACUTE
+1E53 ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON AND ACUTE
+1E54 ; mapped ; 1E55 # 1.1 LATIN CAPITAL LETTER P WITH ACUTE
+1E55 ; valid # 1.1 LATIN SMALL LETTER P WITH ACUTE
+1E56 ; mapped ; 1E57 # 1.1 LATIN CAPITAL LETTER P WITH DOT ABOVE
+1E57 ; valid # 1.1 LATIN SMALL LETTER P WITH DOT ABOVE
+1E58 ; mapped ; 1E59 # 1.1 LATIN CAPITAL LETTER R WITH DOT ABOVE
+1E59 ; valid # 1.1 LATIN SMALL LETTER R WITH DOT ABOVE
+1E5A ; mapped ; 1E5B # 1.1 LATIN CAPITAL LETTER R WITH DOT BELOW
+1E5B ; valid # 1.1 LATIN SMALL LETTER R WITH DOT BELOW
+1E5C ; mapped ; 1E5D # 1.1 LATIN CAPITAL LETTER R WITH DOT BELOW AND MACRON
+1E5D ; valid # 1.1 LATIN SMALL LETTER R WITH DOT BELOW AND MACRON
+1E5E ; mapped ; 1E5F # 1.1 LATIN CAPITAL LETTER R WITH LINE BELOW
+1E5F ; valid # 1.1 LATIN SMALL LETTER R WITH LINE BELOW
+1E60 ; mapped ; 1E61 # 1.1 LATIN CAPITAL LETTER S WITH DOT ABOVE
+1E61 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT ABOVE
+1E62 ; mapped ; 1E63 # 1.1 LATIN CAPITAL LETTER S WITH DOT BELOW
+1E63 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT BELOW
+1E64 ; mapped ; 1E65 # 1.1 LATIN CAPITAL LETTER S WITH ACUTE AND DOT ABOVE
+1E65 ; valid # 1.1 LATIN SMALL LETTER S WITH ACUTE AND DOT ABOVE
+1E66 ; mapped ; 1E67 # 1.1 LATIN CAPITAL LETTER S WITH CARON AND DOT ABOVE
+1E67 ; valid # 1.1 LATIN SMALL LETTER S WITH CARON AND DOT ABOVE
+1E68 ; mapped ; 1E69 # 1.1 LATIN CAPITAL LETTER S WITH DOT BELOW AND DOT ABOVE
+1E69 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT BELOW AND DOT ABOVE
+1E6A ; mapped ; 1E6B # 1.1 LATIN CAPITAL LETTER T WITH DOT ABOVE
+1E6B ; valid # 1.1 LATIN SMALL LETTER T WITH DOT ABOVE
+1E6C ; mapped ; 1E6D # 1.1 LATIN CAPITAL LETTER T WITH DOT BELOW
+1E6D ; valid # 1.1 LATIN SMALL LETTER T WITH DOT BELOW
+1E6E ; mapped ; 1E6F # 1.1 LATIN CAPITAL LETTER T WITH LINE BELOW
+1E6F ; valid # 1.1 LATIN SMALL LETTER T WITH LINE BELOW
+1E70 ; mapped ; 1E71 # 1.1 LATIN CAPITAL LETTER T WITH CIRCUMFLEX BELOW
+1E71 ; valid # 1.1 LATIN SMALL LETTER T WITH CIRCUMFLEX BELOW
+1E72 ; mapped ; 1E73 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS BELOW
+1E73 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS BELOW
+1E74 ; mapped ; 1E75 # 1.1 LATIN CAPITAL LETTER U WITH TILDE BELOW
+1E75 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE BELOW
+1E76 ; mapped ; 1E77 # 1.1 LATIN CAPITAL LETTER U WITH CIRCUMFLEX BELOW
+1E77 ; valid # 1.1 LATIN SMALL LETTER U WITH CIRCUMFLEX BELOW
+1E78 ; mapped ; 1E79 # 1.1 LATIN CAPITAL LETTER U WITH TILDE AND ACUTE
+1E79 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE AND ACUTE
+1E7A ; mapped ; 1E7B # 1.1 LATIN CAPITAL LETTER U WITH MACRON AND DIAERESIS
+1E7B ; valid # 1.1 LATIN SMALL LETTER U WITH MACRON AND DIAERESIS
+1E7C ; mapped ; 1E7D # 1.1 LATIN CAPITAL LETTER V WITH TILDE
+1E7D ; valid # 1.1 LATIN SMALL LETTER V WITH TILDE
+1E7E ; mapped ; 1E7F # 1.1 LATIN CAPITAL LETTER V WITH DOT BELOW
+1E7F ; valid # 1.1 LATIN SMALL LETTER V WITH DOT BELOW
+1E80 ; mapped ; 1E81 # 1.1 LATIN CAPITAL LETTER W WITH GRAVE
+1E81 ; valid # 1.1 LATIN SMALL LETTER W WITH GRAVE
+1E82 ; mapped ; 1E83 # 1.1 LATIN CAPITAL LETTER W WITH ACUTE
+1E83 ; valid # 1.1 LATIN SMALL LETTER W WITH ACUTE
+1E84 ; mapped ; 1E85 # 1.1 LATIN CAPITAL LETTER W WITH DIAERESIS
+1E85 ; valid # 1.1 LATIN SMALL LETTER W WITH DIAERESIS
+1E86 ; mapped ; 1E87 # 1.1 LATIN CAPITAL LETTER W WITH DOT ABOVE
+1E87 ; valid # 1.1 LATIN SMALL LETTER W WITH DOT ABOVE
+1E88 ; mapped ; 1E89 # 1.1 LATIN CAPITAL LETTER W WITH DOT BELOW
+1E89 ; valid # 1.1 LATIN SMALL LETTER W WITH DOT BELOW
+1E8A ; mapped ; 1E8B # 1.1 LATIN CAPITAL LETTER X WITH DOT ABOVE
+1E8B ; valid # 1.1 LATIN SMALL LETTER X WITH DOT ABOVE
+1E8C ; mapped ; 1E8D # 1.1 LATIN CAPITAL LETTER X WITH DIAERESIS
+1E8D ; valid # 1.1 LATIN SMALL LETTER X WITH DIAERESIS
+1E8E ; mapped ; 1E8F # 1.1 LATIN CAPITAL LETTER Y WITH DOT ABOVE
+1E8F ; valid # 1.1 LATIN SMALL LETTER Y WITH DOT ABOVE
+1E90 ; mapped ; 1E91 # 1.1 LATIN CAPITAL LETTER Z WITH CIRCUMFLEX
+1E91 ; valid # 1.1 LATIN SMALL LETTER Z WITH CIRCUMFLEX
+1E92 ; mapped ; 1E93 # 1.1 LATIN CAPITAL LETTER Z WITH DOT BELOW
+1E93 ; valid # 1.1 LATIN SMALL LETTER Z WITH DOT BELOW
+1E94 ; mapped ; 1E95 # 1.1 LATIN CAPITAL LETTER Z WITH LINE BELOW
+1E95..1E99 ; valid # 1.1 LATIN SMALL LETTER Z WITH LINE BELOW..LATIN SMALL LETTER Y WITH RING ABOVE
+1E9A ; mapped ; 0061 02BE # 1.1 LATIN SMALL LETTER A WITH RIGHT HALF RING
+1E9B ; mapped ; 1E61 # 2.0 LATIN SMALL LETTER LONG S WITH DOT ABOVE
+1E9C..1E9D ; valid # 5.1 LATIN SMALL LETTER LONG S WITH DIAGONAL STROKE..LATIN SMALL LETTER LONG S WITH HIGH STROKE
+1E9E ; mapped ; 0073 0073 # 5.1 LATIN CAPITAL LETTER SHARP S
+1E9F ; valid # 5.1 LATIN SMALL LETTER DELTA
+1EA0 ; mapped ; 1EA1 # 1.1 LATIN CAPITAL LETTER A WITH DOT BELOW
+1EA1 ; valid # 1.1 LATIN SMALL LETTER A WITH DOT BELOW
+1EA2 ; mapped ; 1EA3 # 1.1 LATIN CAPITAL LETTER A WITH HOOK ABOVE
+1EA3 ; valid # 1.1 LATIN SMALL LETTER A WITH HOOK ABOVE
+1EA4 ; mapped ; 1EA5 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND ACUTE
+1EA5 ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND ACUTE
+1EA6 ; mapped ; 1EA7 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND GRAVE
+1EA7 ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND GRAVE
+1EA8 ; mapped ; 1EA9 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE
+1EA9 ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE
+1EAA ; mapped ; 1EAB # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND TILDE
+1EAB ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND TILDE
+1EAC ; mapped ; 1EAD # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND DOT BELOW
+1EAD ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND DOT BELOW
+1EAE ; mapped ; 1EAF # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND ACUTE
+1EAF ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND ACUTE
+1EB0 ; mapped ; 1EB1 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND GRAVE
+1EB1 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND GRAVE
+1EB2 ; mapped ; 1EB3 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND HOOK ABOVE
+1EB3 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND HOOK ABOVE
+1EB4 ; mapped ; 1EB5 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND TILDE
+1EB5 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND TILDE
+1EB6 ; mapped ; 1EB7 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND DOT BELOW
+1EB7 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND DOT BELOW
+1EB8 ; mapped ; 1EB9 # 1.1 LATIN CAPITAL LETTER E WITH DOT BELOW
+1EB9 ; valid # 1.1 LATIN SMALL LETTER E WITH DOT BELOW
+1EBA ; mapped ; 1EBB # 1.1 LATIN CAPITAL LETTER E WITH HOOK ABOVE
+1EBB ; valid # 1.1 LATIN SMALL LETTER E WITH HOOK ABOVE
+1EBC ; mapped ; 1EBD # 1.1 LATIN CAPITAL LETTER E WITH TILDE
+1EBD ; valid # 1.1 LATIN SMALL LETTER E WITH TILDE
+1EBE ; mapped ; 1EBF # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND ACUTE
+1EBF ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND ACUTE
+1EC0 ; mapped ; 1EC1 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND GRAVE
+1EC1 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND GRAVE
+1EC2 ; mapped ; 1EC3 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE
+1EC3 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE
+1EC4 ; mapped ; 1EC5 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND TILDE
+1EC5 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND TILDE
+1EC6 ; mapped ; 1EC7 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND DOT BELOW
+1EC7 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND DOT BELOW
+1EC8 ; mapped ; 1EC9 # 1.1 LATIN CAPITAL LETTER I WITH HOOK ABOVE
+1EC9 ; valid # 1.1 LATIN SMALL LETTER I WITH HOOK ABOVE
+1ECA ; mapped ; 1ECB # 1.1 LATIN CAPITAL LETTER I WITH DOT BELOW
+1ECB ; valid # 1.1 LATIN SMALL LETTER I WITH DOT BELOW
+1ECC ; mapped ; 1ECD # 1.1 LATIN CAPITAL LETTER O WITH DOT BELOW
+1ECD ; valid # 1.1 LATIN SMALL LETTER O WITH DOT BELOW
+1ECE ; mapped ; 1ECF # 1.1 LATIN CAPITAL LETTER O WITH HOOK ABOVE
+1ECF ; valid # 1.1 LATIN SMALL LETTER O WITH HOOK ABOVE
+1ED0 ; mapped ; 1ED1 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND ACUTE
+1ED1 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND ACUTE
+1ED2 ; mapped ; 1ED3 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND GRAVE
+1ED3 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND GRAVE
+1ED4 ; mapped ; 1ED5 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE
+1ED5 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE
+1ED6 ; mapped ; 1ED7 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND TILDE
+1ED7 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND TILDE
+1ED8 ; mapped ; 1ED9 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND DOT BELOW
+1ED9 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND DOT BELOW
+1EDA ; mapped ; 1EDB # 1.1 LATIN CAPITAL LETTER O WITH HORN AND ACUTE
+1EDB ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND ACUTE
+1EDC ; mapped ; 1EDD # 1.1 LATIN CAPITAL LETTER O WITH HORN AND GRAVE
+1EDD ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND GRAVE
+1EDE ; mapped ; 1EDF # 1.1 LATIN CAPITAL LETTER O WITH HORN AND HOOK ABOVE
+1EDF ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND HOOK ABOVE
+1EE0 ; mapped ; 1EE1 # 1.1 LATIN CAPITAL LETTER O WITH HORN AND TILDE
+1EE1 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND TILDE
+1EE2 ; mapped ; 1EE3 # 1.1 LATIN CAPITAL LETTER O WITH HORN AND DOT BELOW
+1EE3 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND DOT BELOW
+1EE4 ; mapped ; 1EE5 # 1.1 LATIN CAPITAL LETTER U WITH DOT BELOW
+1EE5 ; valid # 1.1 LATIN SMALL LETTER U WITH DOT BELOW
+1EE6 ; mapped ; 1EE7 # 1.1 LATIN CAPITAL LETTER U WITH HOOK ABOVE
+1EE7 ; valid # 1.1 LATIN SMALL LETTER U WITH HOOK ABOVE
+1EE8 ; mapped ; 1EE9 # 1.1 LATIN CAPITAL LETTER U WITH HORN AND ACUTE
+1EE9 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND ACUTE
+1EEA ; mapped ; 1EEB # 1.1 LATIN CAPITAL LETTER U WITH HORN AND GRAVE
+1EEB ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND GRAVE
+1EEC ; mapped ; 1EED # 1.1 LATIN CAPITAL LETTER U WITH HORN AND HOOK ABOVE
+1EED ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND HOOK ABOVE
+1EEE ; mapped ; 1EEF # 1.1 LATIN CAPITAL LETTER U WITH HORN AND TILDE
+1EEF ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND TILDE
+1EF0 ; mapped ; 1EF1 # 1.1 LATIN CAPITAL LETTER U WITH HORN AND DOT BELOW
+1EF1 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND DOT BELOW
+1EF2 ; mapped ; 1EF3 # 1.1 LATIN CAPITAL LETTER Y WITH GRAVE
+1EF3 ; valid # 1.1 LATIN SMALL LETTER Y WITH GRAVE
+1EF4 ; mapped ; 1EF5 # 1.1 LATIN CAPITAL LETTER Y WITH DOT BELOW
+1EF5 ; valid # 1.1 LATIN SMALL LETTER Y WITH DOT BELOW
+1EF6 ; mapped ; 1EF7 # 1.1 LATIN CAPITAL LETTER Y WITH HOOK ABOVE
+1EF7 ; valid # 1.1 LATIN SMALL LETTER Y WITH HOOK ABOVE
+1EF8 ; mapped ; 1EF9 # 1.1 LATIN CAPITAL LETTER Y WITH TILDE
+1EF9 ; valid # 1.1 LATIN SMALL LETTER Y WITH TILDE
+1EFA ; mapped ; 1EFB # 5.1 LATIN CAPITAL LETTER MIDDLE-WELSH LL
+1EFB ; valid # 5.1 LATIN SMALL LETTER MIDDLE-WELSH LL
+1EFC ; mapped ; 1EFD # 5.1 LATIN CAPITAL LETTER MIDDLE-WELSH V
+1EFD ; valid # 5.1 LATIN SMALL LETTER MIDDLE-WELSH V
+1EFE ; mapped ; 1EFF # 5.1 LATIN CAPITAL LETTER Y WITH LOOP
+1EFF ; valid # 5.1 LATIN SMALL LETTER Y WITH LOOP
+1F00..1F07 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI
+1F08 ; mapped ; 1F00 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI
+1F09 ; mapped ; 1F01 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA
+1F0A ; mapped ; 1F02 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA
+1F0B ; mapped ; 1F03 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA
+1F0C ; mapped ; 1F04 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA
+1F0D ; mapped ; 1F05 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA
+1F0E ; mapped ; 1F06 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI
+1F0F ; mapped ; 1F07 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI
+1F10..1F15 ; valid # 1.1 GREEK SMALL LETTER EPSILON WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA
+1F16..1F17 ; disallowed # NA ..
+1F18 ; mapped ; 1F10 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI +1F19 ; mapped ; 1F11 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA +1F1A ; mapped ; 1F12 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI AND VARIA +1F1B ; mapped ; 1F13 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA AND VARIA +1F1C ; mapped ; 1F14 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI AND OXIA +1F1D ; mapped ; 1F15 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA +1F1E..1F1F ; disallowed # NA .. +1F20..1F27 ; valid # 1.1 GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI +1F28 ; mapped ; 1F20 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI +1F29 ; mapped ; 1F21 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA +1F2A ; mapped ; 1F22 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA +1F2B ; mapped ; 1F23 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA +1F2C ; mapped ; 1F24 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA +1F2D ; mapped ; 1F25 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA +1F2E ; mapped ; 1F26 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI +1F2F ; mapped ; 1F27 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI +1F30..1F37 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH PSILI..GREEK SMALL LETTER IOTA WITH DASIA AND PERISPOMENI +1F38 ; mapped ; 1F30 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI +1F39 ; mapped ; 1F31 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA +1F3A ; mapped ; 1F32 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI AND VARIA +1F3B ; mapped ; 1F33 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND VARIA +1F3C ; mapped ; 1F34 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI AND OXIA +1F3D ; mapped ; 1F35 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND OXIA +1F3E ; mapped ; 1F36 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI AND PERISPOMENI +1F3F ; mapped ; 1F37 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND PERISPOMENI +1F40..1F45 ; valid # 1.1 GREEK SMALL LETTER OMICRON WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA +1F46..1F47 ; disallowed # NA .. +1F48 ; mapped ; 1F40 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI +1F49 ; mapped ; 1F41 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA +1F4A ; mapped ; 1F42 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI AND VARIA +1F4B ; mapped ; 1F43 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA AND VARIA +1F4C ; mapped ; 1F44 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI AND OXIA +1F4D ; mapped ; 1F45 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA +1F4E..1F4F ; disallowed # NA .. 
+1F50..1F57 ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI +1F58 ; disallowed # NA +1F59 ; mapped ; 1F51 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA +1F5A ; disallowed # NA +1F5B ; mapped ; 1F53 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA +1F5C ; disallowed # NA +1F5D ; mapped ; 1F55 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA +1F5E ; disallowed # NA +1F5F ; mapped ; 1F57 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI +1F60..1F67 ; valid # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI..GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI +1F68 ; mapped ; 1F60 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI +1F69 ; mapped ; 1F61 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA +1F6A ; mapped ; 1F62 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA +1F6B ; mapped ; 1F63 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA +1F6C ; mapped ; 1F64 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA +1F6D ; mapped ; 1F65 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA +1F6E ; mapped ; 1F66 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI +1F6F ; mapped ; 1F67 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI +1F70 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH VARIA +1F71 ; mapped ; 03AC # 1.1 GREEK SMALL LETTER ALPHA WITH OXIA +1F72 ; valid # 1.1 GREEK SMALL LETTER EPSILON WITH VARIA +1F73 ; mapped ; 03AD # 1.1 GREEK SMALL LETTER EPSILON WITH OXIA +1F74 ; valid # 1.1 GREEK SMALL LETTER ETA WITH VARIA +1F75 ; mapped ; 03AE # 1.1 GREEK SMALL LETTER ETA WITH OXIA +1F76 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH VARIA +1F77 ; mapped ; 03AF # 1.1 GREEK SMALL LETTER IOTA WITH OXIA +1F78 ; valid # 1.1 GREEK SMALL LETTER OMICRON WITH VARIA +1F79 ; mapped ; 03CC # 1.1 GREEK SMALL LETTER OMICRON WITH OXIA +1F7A ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH VARIA +1F7B ; mapped ; 03CD # 1.1 GREEK SMALL LETTER UPSILON WITH OXIA +1F7C ; valid # 1.1 GREEK SMALL LETTER OMEGA WITH VARIA +1F7D ; mapped ; 03CE # 1.1 GREEK SMALL LETTER OMEGA WITH OXIA +1F7E..1F7F ; disallowed # NA .. 
+1F80 ; mapped ; 1F00 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI +1F81 ; mapped ; 1F01 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND YPOGEGRAMMENI +1F82 ; mapped ; 1F02 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND YPOGEGRAMMENI +1F83 ; mapped ; 1F03 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND YPOGEGRAMMENI +1F84 ; mapped ; 1F04 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND YPOGEGRAMMENI +1F85 ; mapped ; 1F05 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND YPOGEGRAMMENI +1F86 ; mapped ; 1F06 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI +1F87 ; mapped ; 1F07 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI +1F88 ; mapped ; 1F00 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PROSGEGRAMMENI +1F89 ; mapped ; 1F01 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PROSGEGRAMMENI +1F8A ; mapped ; 1F02 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA AND PROSGEGRAMMENI +1F8B ; mapped ; 1F03 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA AND PROSGEGRAMMENI +1F8C ; mapped ; 1F04 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA AND PROSGEGRAMMENI +1F8D ; mapped ; 1F05 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA AND PROSGEGRAMMENI +1F8E ; mapped ; 1F06 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI +1F8F ; mapped ; 1F07 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI +1F90 ; mapped ; 1F20 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND YPOGEGRAMMENI +1F91 ; mapped ; 1F21 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND YPOGEGRAMMENI +1F92 ; mapped ; 1F22 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND YPOGEGRAMMENI +1F93 ; mapped ; 1F23 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND YPOGEGRAMMENI +1F94 ; mapped ; 1F24 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND YPOGEGRAMMENI +1F95 ; mapped ; 1F25 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND YPOGEGRAMMENI +1F96 ; mapped ; 1F26 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI +1F97 ; mapped ; 1F27 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI +1F98 ; mapped ; 1F20 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PROSGEGRAMMENI +1F99 ; mapped ; 1F21 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PROSGEGRAMMENI +1F9A ; mapped ; 1F22 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA AND PROSGEGRAMMENI +1F9B ; mapped ; 1F23 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA AND PROSGEGRAMMENI +1F9C ; mapped ; 1F24 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA AND PROSGEGRAMMENI +1F9D ; mapped ; 1F25 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA AND PROSGEGRAMMENI +1F9E ; mapped ; 1F26 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI +1F9F ; mapped ; 1F27 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI +1FA0 ; mapped ; 1F60 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND YPOGEGRAMMENI +1FA1 ; mapped ; 1F61 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI +1FA2 ; mapped ; 1F62 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND YPOGEGRAMMENI +1FA3 ; mapped ; 1F63 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND YPOGEGRAMMENI +1FA4 ; mapped ; 1F64 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND YPOGEGRAMMENI +1FA5 ; 
mapped ; 1F65 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND YPOGEGRAMMENI +1FA6 ; mapped ; 1F66 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI +1FA7 ; mapped ; 1F67 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI +1FA8 ; mapped ; 1F60 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PROSGEGRAMMENI +1FA9 ; mapped ; 1F61 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PROSGEGRAMMENI +1FAA ; mapped ; 1F62 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI +1FAB ; mapped ; 1F63 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA AND PROSGEGRAMMENI +1FAC ; mapped ; 1F64 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA AND PROSGEGRAMMENI +1FAD ; mapped ; 1F65 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA AND PROSGEGRAMMENI +1FAE ; mapped ; 1F66 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI +1FAF ; mapped ; 1F67 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI +1FB0..1FB1 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH VRACHY..GREEK SMALL LETTER ALPHA WITH MACRON +1FB2 ; mapped ; 1F70 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH VARIA AND YPOGEGRAMMENI +1FB3 ; mapped ; 03B1 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI +1FB4 ; mapped ; 03AC 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI +1FB5 ; disallowed # NA +1FB6 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH PERISPOMENI +1FB7 ; mapped ; 1FB6 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND YPOGEGRAMMENI +1FB8 ; mapped ; 1FB0 # 1.1 GREEK CAPITAL LETTER ALPHA WITH VRACHY +1FB9 ; mapped ; 1FB1 # 1.1 GREEK CAPITAL LETTER ALPHA WITH MACRON +1FBA ; mapped ; 1F70 # 1.1 GREEK CAPITAL LETTER ALPHA WITH VARIA +1FBB ; mapped ; 03AC # 1.1 GREEK CAPITAL LETTER ALPHA WITH OXIA +1FBC ; mapped ; 03B1 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI +1FBD ; disallowed_STD3_mapped ; 0020 0313 # 1.1 GREEK KORONIS +1FBE ; mapped ; 03B9 # 1.1 GREEK PROSGEGRAMMENI +1FBF ; disallowed_STD3_mapped ; 0020 0313 # 1.1 GREEK PSILI +1FC0 ; disallowed_STD3_mapped ; 0020 0342 # 1.1 GREEK PERISPOMENI +1FC1 ; disallowed_STD3_mapped ; 0020 0308 0342 #1.1 GREEK DIALYTIKA AND PERISPOMENI +1FC2 ; mapped ; 1F74 03B9 # 1.1 GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI +1FC3 ; mapped ; 03B7 03B9 # 1.1 GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI +1FC4 ; mapped ; 03AE 03B9 # 1.1 GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI +1FC5 ; disallowed # NA +1FC6 ; valid # 1.1 GREEK SMALL LETTER ETA WITH PERISPOMENI +1FC7 ; mapped ; 1FC6 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PERISPOMENI AND YPOGEGRAMMENI +1FC8 ; mapped ; 1F72 # 1.1 GREEK CAPITAL LETTER EPSILON WITH VARIA +1FC9 ; mapped ; 03AD # 1.1 GREEK CAPITAL LETTER EPSILON WITH OXIA +1FCA ; mapped ; 1F74 # 1.1 GREEK CAPITAL LETTER ETA WITH VARIA +1FCB ; mapped ; 03AE # 1.1 GREEK CAPITAL LETTER ETA WITH OXIA +1FCC ; mapped ; 03B7 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI +1FCD ; disallowed_STD3_mapped ; 0020 0313 0300 #1.1 GREEK PSILI AND VARIA +1FCE ; disallowed_STD3_mapped ; 0020 0313 0301 #1.1 GREEK PSILI AND OXIA +1FCF ; disallowed_STD3_mapped ; 0020 0313 0342 #1.1 GREEK PSILI AND PERISPOMENI +1FD0..1FD2 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA +1FD3 ; mapped ; 0390 # 1.1 GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA +1FD4..1FD5 ; disallowed # NA .. 
+1FD6..1FD7 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI +1FD8 ; mapped ; 1FD0 # 1.1 GREEK CAPITAL LETTER IOTA WITH VRACHY +1FD9 ; mapped ; 1FD1 # 1.1 GREEK CAPITAL LETTER IOTA WITH MACRON +1FDA ; mapped ; 1F76 # 1.1 GREEK CAPITAL LETTER IOTA WITH VARIA +1FDB ; mapped ; 03AF # 1.1 GREEK CAPITAL LETTER IOTA WITH OXIA +1FDC ; disallowed # NA +1FDD ; disallowed_STD3_mapped ; 0020 0314 0300 #1.1 GREEK DASIA AND VARIA +1FDE ; disallowed_STD3_mapped ; 0020 0314 0301 #1.1 GREEK DASIA AND OXIA +1FDF ; disallowed_STD3_mapped ; 0020 0314 0342 #1.1 GREEK DASIA AND PERISPOMENI +1FE0..1FE2 ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA +1FE3 ; mapped ; 03B0 # 1.1 GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA +1FE4..1FE7 ; valid # 1.1 GREEK SMALL LETTER RHO WITH PSILI..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND PERISPOMENI +1FE8 ; mapped ; 1FE0 # 1.1 GREEK CAPITAL LETTER UPSILON WITH VRACHY +1FE9 ; mapped ; 1FE1 # 1.1 GREEK CAPITAL LETTER UPSILON WITH MACRON +1FEA ; mapped ; 1F7A # 1.1 GREEK CAPITAL LETTER UPSILON WITH VARIA +1FEB ; mapped ; 03CD # 1.1 GREEK CAPITAL LETTER UPSILON WITH OXIA +1FEC ; mapped ; 1FE5 # 1.1 GREEK CAPITAL LETTER RHO WITH DASIA +1FED ; disallowed_STD3_mapped ; 0020 0308 0300 #1.1 GREEK DIALYTIKA AND VARIA +1FEE ; disallowed_STD3_mapped ; 0020 0308 0301 #1.1 GREEK DIALYTIKA AND OXIA +1FEF ; disallowed_STD3_mapped ; 0060 # 1.1 GREEK VARIA +1FF0..1FF1 ; disallowed # NA .. +1FF2 ; mapped ; 1F7C 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI +1FF3 ; mapped ; 03C9 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI +1FF4 ; mapped ; 03CE 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI +1FF5 ; disallowed # NA +1FF6 ; valid # 1.1 GREEK SMALL LETTER OMEGA WITH PERISPOMENI +1FF7 ; mapped ; 1FF6 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND YPOGEGRAMMENI +1FF8 ; mapped ; 1F78 # 1.1 GREEK CAPITAL LETTER OMICRON WITH VARIA +1FF9 ; mapped ; 03CC # 1.1 GREEK CAPITAL LETTER OMICRON WITH OXIA +1FFA ; mapped ; 1F7C # 1.1 GREEK CAPITAL LETTER OMEGA WITH VARIA +1FFB ; mapped ; 03CE # 1.1 GREEK CAPITAL LETTER OMEGA WITH OXIA +1FFC ; mapped ; 03C9 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI +1FFD ; disallowed_STD3_mapped ; 0020 0301 # 1.1 GREEK OXIA +1FFE ; disallowed_STD3_mapped ; 0020 0314 # 1.1 GREEK DASIA +1FFF ; disallowed # NA +2000..200A ; disallowed_STD3_mapped ; 0020 # 1.1 EN QUAD..HAIR SPACE +200B ; ignored # 1.1 ZERO WIDTH SPACE +200C..200D ; deviation ; # 1.1 ZERO WIDTH NON-JOINER..ZERO WIDTH JOINER +200E..200F ; disallowed # 1.1 LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK +2010 ; valid ; ; NV8 # 1.1 HYPHEN +2011 ; mapped ; 2010 # 1.1 NON-BREAKING HYPHEN +2012..2016 ; valid ; ; NV8 # 1.1 FIGURE DASH..DOUBLE VERTICAL LINE +2017 ; disallowed_STD3_mapped ; 0020 0333 # 1.1 DOUBLE LOW LINE +2018..2023 ; valid ; ; NV8 # 1.1 LEFT SINGLE QUOTATION MARK..TRIANGULAR BULLET +2024..2026 ; disallowed # 1.1 ONE DOT LEADER..HORIZONTAL ELLIPSIS +2027 ; valid ; ; NV8 # 1.1 HYPHENATION POINT +2028..202E ; disallowed # 1.1 LINE SEPARATOR..RIGHT-TO-LEFT OVERRIDE +202F ; disallowed_STD3_mapped ; 0020 # 3.0 NARROW NO-BREAK SPACE +2030..2032 ; valid ; ; NV8 # 1.1 PER MILLE SIGN..PRIME +2033 ; mapped ; 2032 2032 # 1.1 DOUBLE PRIME +2034 ; mapped ; 2032 2032 2032 #1.1 TRIPLE PRIME +2035 ; valid ; ; NV8 # 1.1 REVERSED PRIME +2036 ; mapped ; 2035 2035 # 1.1 REVERSED DOUBLE PRIME +2037 ; mapped ; 2035 2035 2035 #1.1 REVERSED 
TRIPLE PRIME +2038..203B ; valid ; ; NV8 # 1.1 CARET..REFERENCE MARK +203C ; disallowed_STD3_mapped ; 0021 0021 # 1.1 DOUBLE EXCLAMATION MARK +203D ; valid ; ; NV8 # 1.1 INTERROBANG +203E ; disallowed_STD3_mapped ; 0020 0305 # 1.1 OVERLINE +203F..2046 ; valid ; ; NV8 # 1.1 UNDERTIE..RIGHT SQUARE BRACKET WITH QUILL +2047 ; disallowed_STD3_mapped ; 003F 003F # 3.2 DOUBLE QUESTION MARK +2048 ; disallowed_STD3_mapped ; 003F 0021 # 3.0 QUESTION EXCLAMATION MARK +2049 ; disallowed_STD3_mapped ; 0021 003F # 3.0 EXCLAMATION QUESTION MARK +204A..204D ; valid ; ; NV8 # 3.0 TIRONIAN SIGN ET..BLACK RIGHTWARDS BULLET +204E..2052 ; valid ; ; NV8 # 3.2 LOW ASTERISK..COMMERCIAL MINUS SIGN +2053..2054 ; valid ; ; NV8 # 4.0 SWUNG DASH..INVERTED UNDERTIE +2055..2056 ; valid ; ; NV8 # 4.1 FLOWER PUNCTUATION MARK..THREE DOT PUNCTUATION +2057 ; mapped ; 2032 2032 2032 2032 #3.2 QUADRUPLE PRIME +2058..205E ; valid ; ; NV8 # 4.1 FOUR DOT PUNCTUATION..VERTICAL FOUR DOTS +205F ; disallowed_STD3_mapped ; 0020 # 3.2 MEDIUM MATHEMATICAL SPACE +2060 ; ignored # 3.2 WORD JOINER +2061..2063 ; disallowed # 3.2 FUNCTION APPLICATION..INVISIBLE SEPARATOR +2064 ; ignored # 5.1 INVISIBLE PLUS +2065 ; disallowed # NA +2066..2069 ; disallowed # 6.3 LEFT-TO-RIGHT ISOLATE..POP DIRECTIONAL ISOLATE +206A..206F ; disallowed # 1.1 INHIBIT SYMMETRIC SWAPPING..NOMINAL DIGIT SHAPES +2070 ; mapped ; 0030 # 1.1 SUPERSCRIPT ZERO +2071 ; mapped ; 0069 # 3.2 SUPERSCRIPT LATIN SMALL LETTER I +2072..2073 ; disallowed # NA .. +2074 ; mapped ; 0034 # 1.1 SUPERSCRIPT FOUR +2075 ; mapped ; 0035 # 1.1 SUPERSCRIPT FIVE +2076 ; mapped ; 0036 # 1.1 SUPERSCRIPT SIX +2077 ; mapped ; 0037 # 1.1 SUPERSCRIPT SEVEN +2078 ; mapped ; 0038 # 1.1 SUPERSCRIPT EIGHT +2079 ; mapped ; 0039 # 1.1 SUPERSCRIPT NINE +207A ; disallowed_STD3_mapped ; 002B # 1.1 SUPERSCRIPT PLUS SIGN +207B ; mapped ; 2212 # 1.1 SUPERSCRIPT MINUS +207C ; disallowed_STD3_mapped ; 003D # 1.1 SUPERSCRIPT EQUALS SIGN +207D ; disallowed_STD3_mapped ; 0028 # 1.1 SUPERSCRIPT LEFT PARENTHESIS +207E ; disallowed_STD3_mapped ; 0029 # 1.1 SUPERSCRIPT RIGHT PARENTHESIS +207F ; mapped ; 006E # 1.1 SUPERSCRIPT LATIN SMALL LETTER N +2080 ; mapped ; 0030 # 1.1 SUBSCRIPT ZERO +2081 ; mapped ; 0031 # 1.1 SUBSCRIPT ONE +2082 ; mapped ; 0032 # 1.1 SUBSCRIPT TWO +2083 ; mapped ; 0033 # 1.1 SUBSCRIPT THREE +2084 ; mapped ; 0034 # 1.1 SUBSCRIPT FOUR +2085 ; mapped ; 0035 # 1.1 SUBSCRIPT FIVE +2086 ; mapped ; 0036 # 1.1 SUBSCRIPT SIX +2087 ; mapped ; 0037 # 1.1 SUBSCRIPT SEVEN +2088 ; mapped ; 0038 # 1.1 SUBSCRIPT EIGHT +2089 ; mapped ; 0039 # 1.1 SUBSCRIPT NINE +208A ; disallowed_STD3_mapped ; 002B # 1.1 SUBSCRIPT PLUS SIGN +208B ; mapped ; 2212 # 1.1 SUBSCRIPT MINUS +208C ; disallowed_STD3_mapped ; 003D # 1.1 SUBSCRIPT EQUALS SIGN +208D ; disallowed_STD3_mapped ; 0028 # 1.1 SUBSCRIPT LEFT PARENTHESIS +208E ; disallowed_STD3_mapped ; 0029 # 1.1 SUBSCRIPT RIGHT PARENTHESIS +208F ; disallowed # NA +2090 ; mapped ; 0061 # 4.1 LATIN SUBSCRIPT SMALL LETTER A +2091 ; mapped ; 0065 # 4.1 LATIN SUBSCRIPT SMALL LETTER E +2092 ; mapped ; 006F # 4.1 LATIN SUBSCRIPT SMALL LETTER O +2093 ; mapped ; 0078 # 4.1 LATIN SUBSCRIPT SMALL LETTER X +2094 ; mapped ; 0259 # 4.1 LATIN SUBSCRIPT SMALL LETTER SCHWA +2095 ; mapped ; 0068 # 6.0 LATIN SUBSCRIPT SMALL LETTER H +2096 ; mapped ; 006B # 6.0 LATIN SUBSCRIPT SMALL LETTER K +2097 ; mapped ; 006C # 6.0 LATIN SUBSCRIPT SMALL LETTER L +2098 ; mapped ; 006D # 6.0 LATIN SUBSCRIPT SMALL LETTER M +2099 ; mapped ; 006E # 6.0 LATIN SUBSCRIPT SMALL LETTER N +209A ; mapped ; 0070 # 6.0 LATIN 
SUBSCRIPT SMALL LETTER P +209B ; mapped ; 0073 # 6.0 LATIN SUBSCRIPT SMALL LETTER S +209C ; mapped ; 0074 # 6.0 LATIN SUBSCRIPT SMALL LETTER T +209D..209F ; disallowed # NA .. +20A0..20A7 ; valid ; ; NV8 # 1.1 EURO-CURRENCY SIGN..PESETA SIGN +20A8 ; mapped ; 0072 0073 # 1.1 RUPEE SIGN +20A9..20AA ; valid ; ; NV8 # 1.1 WON SIGN..NEW SHEQEL SIGN +20AB ; valid ; ; NV8 # 2.0 DONG SIGN +20AC ; valid ; ; NV8 # 2.1 EURO SIGN +20AD..20AF ; valid ; ; NV8 # 3.0 KIP SIGN..DRACHMA SIGN +20B0..20B1 ; valid ; ; NV8 # 3.2 GERMAN PENNY SIGN..PESO SIGN +20B2..20B5 ; valid ; ; NV8 # 4.1 GUARANI SIGN..CEDI SIGN +20B6..20B8 ; valid ; ; NV8 # 5.2 LIVRE TOURNOIS SIGN..TENGE SIGN +20B9 ; valid ; ; NV8 # 6.0 INDIAN RUPEE SIGN +20BA ; valid ; ; NV8 # 6.2 TURKISH LIRA SIGN +20BB..20BD ; valid ; ; NV8 # 7.0 NORDIC MARK SIGN..RUBLE SIGN +20BE ; valid ; ; NV8 # 8.0 LARI SIGN +20BF..20CF ; disallowed # NA .. +20D0..20E1 ; valid ; ; NV8 # 1.1 COMBINING LEFT HARPOON ABOVE..COMBINING LEFT RIGHT ARROW ABOVE +20E2..20E3 ; valid ; ; NV8 # 3.0 COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING KEYCAP +20E4..20EA ; valid ; ; NV8 # 3.2 COMBINING ENCLOSING UPWARD POINTING TRIANGLE..COMBINING LEFTWARDS ARROW OVERLAY +20EB ; valid ; ; NV8 # 4.1 COMBINING LONG DOUBLE SOLIDUS OVERLAY +20EC..20EF ; valid ; ; NV8 # 5.0 COMBINING RIGHTWARDS HARPOON WITH BARB DOWNWARDS..COMBINING RIGHT ARROW BELOW +20F0 ; valid ; ; NV8 # 5.1 COMBINING ASTERISK ABOVE +20F1..20FF ; disallowed # NA .. +2100 ; disallowed_STD3_mapped ; 0061 002F 0063 #1.1 ACCOUNT OF +2101 ; disallowed_STD3_mapped ; 0061 002F 0073 #1.1 ADDRESSED TO THE SUBJECT +2102 ; mapped ; 0063 # 1.1 DOUBLE-STRUCK CAPITAL C +2103 ; mapped ; 00B0 0063 # 1.1 DEGREE CELSIUS +2104 ; valid ; ; NV8 # 1.1 CENTRE LINE SYMBOL +2105 ; disallowed_STD3_mapped ; 0063 002F 006F #1.1 CARE OF +2106 ; disallowed_STD3_mapped ; 0063 002F 0075 #1.1 CADA UNA +2107 ; mapped ; 025B # 1.1 EULER CONSTANT +2108 ; valid ; ; NV8 # 1.1 SCRUPLE +2109 ; mapped ; 00B0 0066 # 1.1 DEGREE FAHRENHEIT +210A ; mapped ; 0067 # 1.1 SCRIPT SMALL G +210B..210E ; mapped ; 0068 # 1.1 SCRIPT CAPITAL H..PLANCK CONSTANT +210F ; mapped ; 0127 # 1.1 PLANCK CONSTANT OVER TWO PI +2110..2111 ; mapped ; 0069 # 1.1 SCRIPT CAPITAL I..BLACK-LETTER CAPITAL I +2112..2113 ; mapped ; 006C # 1.1 SCRIPT CAPITAL L..SCRIPT SMALL L +2114 ; valid ; ; NV8 # 1.1 L B BAR SYMBOL +2115 ; mapped ; 006E # 1.1 DOUBLE-STRUCK CAPITAL N +2116 ; mapped ; 006E 006F # 1.1 NUMERO SIGN +2117..2118 ; valid ; ; NV8 # 1.1 SOUND RECORDING COPYRIGHT..SCRIPT CAPITAL P +2119 ; mapped ; 0070 # 1.1 DOUBLE-STRUCK CAPITAL P +211A ; mapped ; 0071 # 1.1 DOUBLE-STRUCK CAPITAL Q +211B..211D ; mapped ; 0072 # 1.1 SCRIPT CAPITAL R..DOUBLE-STRUCK CAPITAL R +211E..211F ; valid ; ; NV8 # 1.1 PRESCRIPTION TAKE..RESPONSE +2120 ; mapped ; 0073 006D # 1.1 SERVICE MARK +2121 ; mapped ; 0074 0065 006C #1.1 TELEPHONE SIGN +2122 ; mapped ; 0074 006D # 1.1 TRADE MARK SIGN +2123 ; valid ; ; NV8 # 1.1 VERSICLE +2124 ; mapped ; 007A # 1.1 DOUBLE-STRUCK CAPITAL Z +2125 ; valid ; ; NV8 # 1.1 OUNCE SIGN +2126 ; mapped ; 03C9 # 1.1 OHM SIGN +2127 ; valid ; ; NV8 # 1.1 INVERTED OHM SIGN +2128 ; mapped ; 007A # 1.1 BLACK-LETTER CAPITAL Z +2129 ; valid ; ; NV8 # 1.1 TURNED GREEK SMALL LETTER IOTA +212A ; mapped ; 006B # 1.1 KELVIN SIGN +212B ; mapped ; 00E5 # 1.1 ANGSTROM SIGN +212C ; mapped ; 0062 # 1.1 SCRIPT CAPITAL B +212D ; mapped ; 0063 # 1.1 BLACK-LETTER CAPITAL C +212E ; valid ; ; NV8 # 1.1 ESTIMATED SYMBOL +212F..2130 ; mapped ; 0065 # 1.1 SCRIPT SMALL E..SCRIPT CAPITAL E +2131 ; mapped ; 0066 # 1.1 
SCRIPT CAPITAL F +2132 ; disallowed # 1.1 TURNED CAPITAL F +2133 ; mapped ; 006D # 1.1 SCRIPT CAPITAL M +2134 ; mapped ; 006F # 1.1 SCRIPT SMALL O +2135 ; mapped ; 05D0 # 1.1 ALEF SYMBOL +2136 ; mapped ; 05D1 # 1.1 BET SYMBOL +2137 ; mapped ; 05D2 # 1.1 GIMEL SYMBOL +2138 ; mapped ; 05D3 # 1.1 DALET SYMBOL +2139 ; mapped ; 0069 # 3.0 INFORMATION SOURCE +213A ; valid ; ; NV8 # 3.0 ROTATED CAPITAL Q +213B ; mapped ; 0066 0061 0078 #4.0 FACSIMILE SIGN +213C ; mapped ; 03C0 # 4.1 DOUBLE-STRUCK SMALL PI +213D..213E ; mapped ; 03B3 # 3.2 DOUBLE-STRUCK SMALL GAMMA..DOUBLE-STRUCK CAPITAL GAMMA +213F ; mapped ; 03C0 # 3.2 DOUBLE-STRUCK CAPITAL PI +2140 ; mapped ; 2211 # 3.2 DOUBLE-STRUCK N-ARY SUMMATION +2141..2144 ; valid ; ; NV8 # 3.2 TURNED SANS-SERIF CAPITAL G..TURNED SANS-SERIF CAPITAL Y +2145..2146 ; mapped ; 0064 # 3.2 DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL D +2147 ; mapped ; 0065 # 3.2 DOUBLE-STRUCK ITALIC SMALL E +2148 ; mapped ; 0069 # 3.2 DOUBLE-STRUCK ITALIC SMALL I +2149 ; mapped ; 006A # 3.2 DOUBLE-STRUCK ITALIC SMALL J +214A..214B ; valid ; ; NV8 # 3.2 PROPERTY LINE..TURNED AMPERSAND +214C ; valid ; ; NV8 # 4.1 PER SIGN +214D ; valid ; ; NV8 # 5.0 AKTIESELSKAB +214E ; valid # 5.0 TURNED SMALL F +214F ; valid ; ; NV8 # 5.1 SYMBOL FOR SAMARITAN SOURCE +2150 ; mapped ; 0031 2044 0037 #5.2 VULGAR FRACTION ONE SEVENTH +2151 ; mapped ; 0031 2044 0039 #5.2 VULGAR FRACTION ONE NINTH +2152 ; mapped ; 0031 2044 0031 0030 #5.2 VULGAR FRACTION ONE TENTH +2153 ; mapped ; 0031 2044 0033 #1.1 VULGAR FRACTION ONE THIRD +2154 ; mapped ; 0032 2044 0033 #1.1 VULGAR FRACTION TWO THIRDS +2155 ; mapped ; 0031 2044 0035 #1.1 VULGAR FRACTION ONE FIFTH +2156 ; mapped ; 0032 2044 0035 #1.1 VULGAR FRACTION TWO FIFTHS +2157 ; mapped ; 0033 2044 0035 #1.1 VULGAR FRACTION THREE FIFTHS +2158 ; mapped ; 0034 2044 0035 #1.1 VULGAR FRACTION FOUR FIFTHS +2159 ; mapped ; 0031 2044 0036 #1.1 VULGAR FRACTION ONE SIXTH +215A ; mapped ; 0035 2044 0036 #1.1 VULGAR FRACTION FIVE SIXTHS +215B ; mapped ; 0031 2044 0038 #1.1 VULGAR FRACTION ONE EIGHTH +215C ; mapped ; 0033 2044 0038 #1.1 VULGAR FRACTION THREE EIGHTHS +215D ; mapped ; 0035 2044 0038 #1.1 VULGAR FRACTION FIVE EIGHTHS +215E ; mapped ; 0037 2044 0038 #1.1 VULGAR FRACTION SEVEN EIGHTHS +215F ; mapped ; 0031 2044 # 1.1 FRACTION NUMERATOR ONE +2160 ; mapped ; 0069 # 1.1 ROMAN NUMERAL ONE +2161 ; mapped ; 0069 0069 # 1.1 ROMAN NUMERAL TWO +2162 ; mapped ; 0069 0069 0069 #1.1 ROMAN NUMERAL THREE +2163 ; mapped ; 0069 0076 # 1.1 ROMAN NUMERAL FOUR +2164 ; mapped ; 0076 # 1.1 ROMAN NUMERAL FIVE +2165 ; mapped ; 0076 0069 # 1.1 ROMAN NUMERAL SIX +2166 ; mapped ; 0076 0069 0069 #1.1 ROMAN NUMERAL SEVEN +2167 ; mapped ; 0076 0069 0069 0069 #1.1 ROMAN NUMERAL EIGHT +2168 ; mapped ; 0069 0078 # 1.1 ROMAN NUMERAL NINE +2169 ; mapped ; 0078 # 1.1 ROMAN NUMERAL TEN +216A ; mapped ; 0078 0069 # 1.1 ROMAN NUMERAL ELEVEN +216B ; mapped ; 0078 0069 0069 #1.1 ROMAN NUMERAL TWELVE +216C ; mapped ; 006C # 1.1 ROMAN NUMERAL FIFTY +216D ; mapped ; 0063 # 1.1 ROMAN NUMERAL ONE HUNDRED +216E ; mapped ; 0064 # 1.1 ROMAN NUMERAL FIVE HUNDRED +216F ; mapped ; 006D # 1.1 ROMAN NUMERAL ONE THOUSAND +2170 ; mapped ; 0069 # 1.1 SMALL ROMAN NUMERAL ONE +2171 ; mapped ; 0069 0069 # 1.1 SMALL ROMAN NUMERAL TWO +2172 ; mapped ; 0069 0069 0069 #1.1 SMALL ROMAN NUMERAL THREE +2173 ; mapped ; 0069 0076 # 1.1 SMALL ROMAN NUMERAL FOUR +2174 ; mapped ; 0076 # 1.1 SMALL ROMAN NUMERAL FIVE +2175 ; mapped ; 0076 0069 # 1.1 SMALL ROMAN NUMERAL SIX +2176 ; mapped ; 0076 0069 0069 #1.1 SMALL 
ROMAN NUMERAL SEVEN +2177 ; mapped ; 0076 0069 0069 0069 #1.1 SMALL ROMAN NUMERAL EIGHT +2178 ; mapped ; 0069 0078 # 1.1 SMALL ROMAN NUMERAL NINE +2179 ; mapped ; 0078 # 1.1 SMALL ROMAN NUMERAL TEN +217A ; mapped ; 0078 0069 # 1.1 SMALL ROMAN NUMERAL ELEVEN +217B ; mapped ; 0078 0069 0069 #1.1 SMALL ROMAN NUMERAL TWELVE +217C ; mapped ; 006C # 1.1 SMALL ROMAN NUMERAL FIFTY +217D ; mapped ; 0063 # 1.1 SMALL ROMAN NUMERAL ONE HUNDRED +217E ; mapped ; 0064 # 1.1 SMALL ROMAN NUMERAL FIVE HUNDRED +217F ; mapped ; 006D # 1.1 SMALL ROMAN NUMERAL ONE THOUSAND +2180..2182 ; valid ; ; NV8 # 1.1 ROMAN NUMERAL ONE THOUSAND C D..ROMAN NUMERAL TEN THOUSAND +2183 ; disallowed # 3.0 ROMAN NUMERAL REVERSED ONE HUNDRED +2184 ; valid # 5.0 LATIN SMALL LETTER REVERSED C +2185..2188 ; valid ; ; NV8 # 5.1 ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND +2189 ; mapped ; 0030 2044 0033 #5.2 VULGAR FRACTION ZERO THIRDS +218A..218B ; valid ; ; NV8 # 8.0 TURNED DIGIT TWO..TURNED DIGIT THREE +218C..218F ; disallowed # NA .. +2190..21EA ; valid ; ; NV8 # 1.1 LEFTWARDS ARROW..UPWARDS WHITE ARROW FROM BAR +21EB..21F3 ; valid ; ; NV8 # 3.0 UPWARDS WHITE ARROW ON PEDESTAL..UP DOWN WHITE ARROW +21F4..21FF ; valid ; ; NV8 # 3.2 RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW +2200..222B ; valid ; ; NV8 # 1.1 FOR ALL..INTEGRAL +222C ; mapped ; 222B 222B # 1.1 DOUBLE INTEGRAL +222D ; mapped ; 222B 222B 222B #1.1 TRIPLE INTEGRAL +222E ; valid ; ; NV8 # 1.1 CONTOUR INTEGRAL +222F ; mapped ; 222E 222E # 1.1 SURFACE INTEGRAL +2230 ; mapped ; 222E 222E 222E #1.1 VOLUME INTEGRAL +2231..225F ; valid ; ; NV8 # 1.1 CLOCKWISE INTEGRAL..QUESTIONED EQUAL TO +2260 ; disallowed_STD3_valid # 1.1 NOT EQUAL TO +2261..226D ; valid ; ; NV8 # 1.1 IDENTICAL TO..NOT EQUIVALENT TO +226E..226F ; disallowed_STD3_valid # 1.1 NOT LESS-THAN..NOT GREATER-THAN +2270..22F1 ; valid ; ; NV8 # 1.1 NEITHER LESS-THAN NOR EQUAL TO..DOWN RIGHT DIAGONAL ELLIPSIS +22F2..22FF ; valid ; ; NV8 # 3.2 ELEMENT OF WITH LONG HORIZONTAL STROKE..Z NOTATION BAG MEMBERSHIP +2300 ; valid ; ; NV8 # 1.1 DIAMETER SIGN +2301 ; valid ; ; NV8 # 3.0 ELECTRIC ARROW +2302..2328 ; valid ; ; NV8 # 1.1 HOUSE..KEYBOARD +2329 ; mapped ; 3008 # 1.1 LEFT-POINTING ANGLE BRACKET +232A ; mapped ; 3009 # 1.1 RIGHT-POINTING ANGLE BRACKET +232B..237A ; valid ; ; NV8 # 1.1 ERASE TO THE LEFT..APL FUNCTIONAL SYMBOL ALPHA +237B ; valid ; ; NV8 # 3.0 NOT CHECK MARK +237C ; valid ; ; NV8 # 3.2 RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW +237D..239A ; valid ; ; NV8 # 3.0 SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL +239B..23CE ; valid ; ; NV8 # 3.2 LEFT PARENTHESIS UPPER HOOK..RETURN SYMBOL +23CF..23D0 ; valid ; ; NV8 # 4.0 EJECT SYMBOL..VERTICAL LINE EXTENSION +23D1..23DB ; valid ; ; NV8 # 4.1 METRICAL BREVE..FUSE +23DC..23E7 ; valid ; ; NV8 # 5.0 TOP PARENTHESIS..ELECTRICAL INTERSECTION +23E8 ; valid ; ; NV8 # 5.2 DECIMAL EXPONENT SYMBOL +23E9..23F3 ; valid ; ; NV8 # 6.0 BLACK RIGHT-POINTING DOUBLE TRIANGLE..HOURGLASS WITH FLOWING SAND +23F4..23FA ; valid ; ; NV8 # 7.0 BLACK MEDIUM LEFT-POINTING TRIANGLE..BLACK CIRCLE FOR RECORD +23FB..23FE ; valid ; ; NV8 # 9.0 POWER SYMBOL..POWER SLEEP SYMBOL +23FF ; disallowed # NA +2400..2424 ; valid ; ; NV8 # 1.1 SYMBOL FOR NULL..SYMBOL FOR NEWLINE +2425..2426 ; valid ; ; NV8 # 3.0 SYMBOL FOR DELETE FORM TWO..SYMBOL FOR SUBSTITUTE FORM TWO +2427..243F ; disallowed # NA .. +2440..244A ; valid ; ; NV8 # 1.1 OCR HOOK..OCR DOUBLE BACKSLASH +244B..245F ; disallowed # NA .. 
+2460 ; mapped ; 0031 # 1.1 CIRCLED DIGIT ONE +2461 ; mapped ; 0032 # 1.1 CIRCLED DIGIT TWO +2462 ; mapped ; 0033 # 1.1 CIRCLED DIGIT THREE +2463 ; mapped ; 0034 # 1.1 CIRCLED DIGIT FOUR +2464 ; mapped ; 0035 # 1.1 CIRCLED DIGIT FIVE +2465 ; mapped ; 0036 # 1.1 CIRCLED DIGIT SIX +2466 ; mapped ; 0037 # 1.1 CIRCLED DIGIT SEVEN +2467 ; mapped ; 0038 # 1.1 CIRCLED DIGIT EIGHT +2468 ; mapped ; 0039 # 1.1 CIRCLED DIGIT NINE +2469 ; mapped ; 0031 0030 # 1.1 CIRCLED NUMBER TEN +246A ; mapped ; 0031 0031 # 1.1 CIRCLED NUMBER ELEVEN +246B ; mapped ; 0031 0032 # 1.1 CIRCLED NUMBER TWELVE +246C ; mapped ; 0031 0033 # 1.1 CIRCLED NUMBER THIRTEEN +246D ; mapped ; 0031 0034 # 1.1 CIRCLED NUMBER FOURTEEN +246E ; mapped ; 0031 0035 # 1.1 CIRCLED NUMBER FIFTEEN +246F ; mapped ; 0031 0036 # 1.1 CIRCLED NUMBER SIXTEEN +2470 ; mapped ; 0031 0037 # 1.1 CIRCLED NUMBER SEVENTEEN +2471 ; mapped ; 0031 0038 # 1.1 CIRCLED NUMBER EIGHTEEN +2472 ; mapped ; 0031 0039 # 1.1 CIRCLED NUMBER NINETEEN +2473 ; mapped ; 0032 0030 # 1.1 CIRCLED NUMBER TWENTY +2474 ; disallowed_STD3_mapped ; 0028 0031 0029 #1.1 PARENTHESIZED DIGIT ONE +2475 ; disallowed_STD3_mapped ; 0028 0032 0029 #1.1 PARENTHESIZED DIGIT TWO +2476 ; disallowed_STD3_mapped ; 0028 0033 0029 #1.1 PARENTHESIZED DIGIT THREE +2477 ; disallowed_STD3_mapped ; 0028 0034 0029 #1.1 PARENTHESIZED DIGIT FOUR +2478 ; disallowed_STD3_mapped ; 0028 0035 0029 #1.1 PARENTHESIZED DIGIT FIVE +2479 ; disallowed_STD3_mapped ; 0028 0036 0029 #1.1 PARENTHESIZED DIGIT SIX +247A ; disallowed_STD3_mapped ; 0028 0037 0029 #1.1 PARENTHESIZED DIGIT SEVEN +247B ; disallowed_STD3_mapped ; 0028 0038 0029 #1.1 PARENTHESIZED DIGIT EIGHT +247C ; disallowed_STD3_mapped ; 0028 0039 0029 #1.1 PARENTHESIZED DIGIT NINE +247D ; disallowed_STD3_mapped ; 0028 0031 0030 0029 #1.1 PARENTHESIZED NUMBER TEN +247E ; disallowed_STD3_mapped ; 0028 0031 0031 0029 #1.1 PARENTHESIZED NUMBER ELEVEN +247F ; disallowed_STD3_mapped ; 0028 0031 0032 0029 #1.1 PARENTHESIZED NUMBER TWELVE +2480 ; disallowed_STD3_mapped ; 0028 0031 0033 0029 #1.1 PARENTHESIZED NUMBER THIRTEEN +2481 ; disallowed_STD3_mapped ; 0028 0031 0034 0029 #1.1 PARENTHESIZED NUMBER FOURTEEN +2482 ; disallowed_STD3_mapped ; 0028 0031 0035 0029 #1.1 PARENTHESIZED NUMBER FIFTEEN +2483 ; disallowed_STD3_mapped ; 0028 0031 0036 0029 #1.1 PARENTHESIZED NUMBER SIXTEEN +2484 ; disallowed_STD3_mapped ; 0028 0031 0037 0029 #1.1 PARENTHESIZED NUMBER SEVENTEEN +2485 ; disallowed_STD3_mapped ; 0028 0031 0038 0029 #1.1 PARENTHESIZED NUMBER EIGHTEEN +2486 ; disallowed_STD3_mapped ; 0028 0031 0039 0029 #1.1 PARENTHESIZED NUMBER NINETEEN +2487 ; disallowed_STD3_mapped ; 0028 0032 0030 0029 #1.1 PARENTHESIZED NUMBER TWENTY +2488..249B ; disallowed # 1.1 DIGIT ONE FULL STOP..NUMBER TWENTY FULL STOP +249C ; disallowed_STD3_mapped ; 0028 0061 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER A +249D ; disallowed_STD3_mapped ; 0028 0062 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER B +249E ; disallowed_STD3_mapped ; 0028 0063 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER C +249F ; disallowed_STD3_mapped ; 0028 0064 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER D +24A0 ; disallowed_STD3_mapped ; 0028 0065 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER E +24A1 ; disallowed_STD3_mapped ; 0028 0066 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER F +24A2 ; disallowed_STD3_mapped ; 0028 0067 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER G +24A3 ; disallowed_STD3_mapped ; 0028 0068 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER H +24A4 ; disallowed_STD3_mapped ; 0028 0069 0029 #1.1 PARENTHESIZED LATIN SMALL 
LETTER I +24A5 ; disallowed_STD3_mapped ; 0028 006A 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER J +24A6 ; disallowed_STD3_mapped ; 0028 006B 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER K +24A7 ; disallowed_STD3_mapped ; 0028 006C 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER L +24A8 ; disallowed_STD3_mapped ; 0028 006D 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER M +24A9 ; disallowed_STD3_mapped ; 0028 006E 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER N +24AA ; disallowed_STD3_mapped ; 0028 006F 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER O +24AB ; disallowed_STD3_mapped ; 0028 0070 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER P +24AC ; disallowed_STD3_mapped ; 0028 0071 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Q +24AD ; disallowed_STD3_mapped ; 0028 0072 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER R +24AE ; disallowed_STD3_mapped ; 0028 0073 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER S +24AF ; disallowed_STD3_mapped ; 0028 0074 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER T +24B0 ; disallowed_STD3_mapped ; 0028 0075 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER U +24B1 ; disallowed_STD3_mapped ; 0028 0076 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER V +24B2 ; disallowed_STD3_mapped ; 0028 0077 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER W +24B3 ; disallowed_STD3_mapped ; 0028 0078 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER X +24B4 ; disallowed_STD3_mapped ; 0028 0079 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Y +24B5 ; disallowed_STD3_mapped ; 0028 007A 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Z +24B6 ; mapped ; 0061 # 1.1 CIRCLED LATIN CAPITAL LETTER A +24B7 ; mapped ; 0062 # 1.1 CIRCLED LATIN CAPITAL LETTER B +24B8 ; mapped ; 0063 # 1.1 CIRCLED LATIN CAPITAL LETTER C +24B9 ; mapped ; 0064 # 1.1 CIRCLED LATIN CAPITAL LETTER D +24BA ; mapped ; 0065 # 1.1 CIRCLED LATIN CAPITAL LETTER E +24BB ; mapped ; 0066 # 1.1 CIRCLED LATIN CAPITAL LETTER F +24BC ; mapped ; 0067 # 1.1 CIRCLED LATIN CAPITAL LETTER G +24BD ; mapped ; 0068 # 1.1 CIRCLED LATIN CAPITAL LETTER H +24BE ; mapped ; 0069 # 1.1 CIRCLED LATIN CAPITAL LETTER I +24BF ; mapped ; 006A # 1.1 CIRCLED LATIN CAPITAL LETTER J +24C0 ; mapped ; 006B # 1.1 CIRCLED LATIN CAPITAL LETTER K +24C1 ; mapped ; 006C # 1.1 CIRCLED LATIN CAPITAL LETTER L +24C2 ; mapped ; 006D # 1.1 CIRCLED LATIN CAPITAL LETTER M +24C3 ; mapped ; 006E # 1.1 CIRCLED LATIN CAPITAL LETTER N +24C4 ; mapped ; 006F # 1.1 CIRCLED LATIN CAPITAL LETTER O +24C5 ; mapped ; 0070 # 1.1 CIRCLED LATIN CAPITAL LETTER P +24C6 ; mapped ; 0071 # 1.1 CIRCLED LATIN CAPITAL LETTER Q +24C7 ; mapped ; 0072 # 1.1 CIRCLED LATIN CAPITAL LETTER R +24C8 ; mapped ; 0073 # 1.1 CIRCLED LATIN CAPITAL LETTER S +24C9 ; mapped ; 0074 # 1.1 CIRCLED LATIN CAPITAL LETTER T +24CA ; mapped ; 0075 # 1.1 CIRCLED LATIN CAPITAL LETTER U +24CB ; mapped ; 0076 # 1.1 CIRCLED LATIN CAPITAL LETTER V +24CC ; mapped ; 0077 # 1.1 CIRCLED LATIN CAPITAL LETTER W +24CD ; mapped ; 0078 # 1.1 CIRCLED LATIN CAPITAL LETTER X +24CE ; mapped ; 0079 # 1.1 CIRCLED LATIN CAPITAL LETTER Y +24CF ; mapped ; 007A # 1.1 CIRCLED LATIN CAPITAL LETTER Z +24D0 ; mapped ; 0061 # 1.1 CIRCLED LATIN SMALL LETTER A +24D1 ; mapped ; 0062 # 1.1 CIRCLED LATIN SMALL LETTER B +24D2 ; mapped ; 0063 # 1.1 CIRCLED LATIN SMALL LETTER C +24D3 ; mapped ; 0064 # 1.1 CIRCLED LATIN SMALL LETTER D +24D4 ; mapped ; 0065 # 1.1 CIRCLED LATIN SMALL LETTER E +24D5 ; mapped ; 0066 # 1.1 CIRCLED LATIN SMALL LETTER F +24D6 ; mapped ; 0067 # 1.1 CIRCLED LATIN SMALL LETTER G +24D7 ; mapped ; 0068 # 1.1 CIRCLED LATIN SMALL LETTER H +24D8 ; mapped ; 0069 # 1.1 CIRCLED LATIN SMALL LETTER I 
+24D9 ; mapped ; 006A # 1.1 CIRCLED LATIN SMALL LETTER J +24DA ; mapped ; 006B # 1.1 CIRCLED LATIN SMALL LETTER K +24DB ; mapped ; 006C # 1.1 CIRCLED LATIN SMALL LETTER L +24DC ; mapped ; 006D # 1.1 CIRCLED LATIN SMALL LETTER M +24DD ; mapped ; 006E # 1.1 CIRCLED LATIN SMALL LETTER N +24DE ; mapped ; 006F # 1.1 CIRCLED LATIN SMALL LETTER O +24DF ; mapped ; 0070 # 1.1 CIRCLED LATIN SMALL LETTER P +24E0 ; mapped ; 0071 # 1.1 CIRCLED LATIN SMALL LETTER Q +24E1 ; mapped ; 0072 # 1.1 CIRCLED LATIN SMALL LETTER R +24E2 ; mapped ; 0073 # 1.1 CIRCLED LATIN SMALL LETTER S +24E3 ; mapped ; 0074 # 1.1 CIRCLED LATIN SMALL LETTER T +24E4 ; mapped ; 0075 # 1.1 CIRCLED LATIN SMALL LETTER U +24E5 ; mapped ; 0076 # 1.1 CIRCLED LATIN SMALL LETTER V +24E6 ; mapped ; 0077 # 1.1 CIRCLED LATIN SMALL LETTER W +24E7 ; mapped ; 0078 # 1.1 CIRCLED LATIN SMALL LETTER X +24E8 ; mapped ; 0079 # 1.1 CIRCLED LATIN SMALL LETTER Y +24E9 ; mapped ; 007A # 1.1 CIRCLED LATIN SMALL LETTER Z +24EA ; mapped ; 0030 # 1.1 CIRCLED DIGIT ZERO +24EB..24FE ; valid ; ; NV8 # 3.2 NEGATIVE CIRCLED NUMBER ELEVEN..DOUBLE CIRCLED NUMBER TEN +24FF ; valid ; ; NV8 # 4.0 NEGATIVE CIRCLED DIGIT ZERO +2500..2595 ; valid ; ; NV8 # 1.1 BOX DRAWINGS LIGHT HORIZONTAL..RIGHT ONE EIGHTH BLOCK +2596..259F ; valid ; ; NV8 # 3.2 QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT +25A0..25EF ; valid ; ; NV8 # 1.1 BLACK SQUARE..LARGE CIRCLE +25F0..25F7 ; valid ; ; NV8 # 3.0 WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT +25F8..25FF ; valid ; ; NV8 # 3.2 UPPER LEFT TRIANGLE..LOWER RIGHT TRIANGLE +2600..2613 ; valid ; ; NV8 # 1.1 BLACK SUN WITH RAYS..SALTIRE +2614..2615 ; valid ; ; NV8 # 4.0 UMBRELLA WITH RAIN DROPS..HOT BEVERAGE +2616..2617 ; valid ; ; NV8 # 3.2 WHITE SHOGI PIECE..BLACK SHOGI PIECE +2618 ; valid ; ; NV8 # 4.1 SHAMROCK +2619 ; valid ; ; NV8 # 3.0 REVERSED ROTATED FLORAL HEART BULLET +261A..266F ; valid ; ; NV8 # 1.1 BLACK LEFT POINTING INDEX..MUSIC SHARP SIGN +2670..2671 ; valid ; ; NV8 # 3.0 WEST SYRIAC CROSS..EAST SYRIAC CROSS +2672..267D ; valid ; ; NV8 # 3.2 UNIVERSAL RECYCLING SYMBOL..PARTIALLY-RECYCLED PAPER SYMBOL +267E..267F ; valid ; ; NV8 # 4.1 PERMANENT PAPER SIGN..WHEELCHAIR SYMBOL +2680..2689 ; valid ; ; NV8 # 3.2 DIE FACE-1..BLACK CIRCLE WITH TWO WHITE DOTS +268A..2691 ; valid ; ; NV8 # 4.0 MONOGRAM FOR YANG..BLACK FLAG +2692..269C ; valid ; ; NV8 # 4.1 HAMMER AND PICK..FLEUR-DE-LIS +269D ; valid ; ; NV8 # 5.1 OUTLINED WHITE STAR +269E..269F ; valid ; ; NV8 # 5.2 THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT +26A0..26A1 ; valid ; ; NV8 # 4.0 WARNING SIGN..HIGH VOLTAGE SIGN +26A2..26B1 ; valid ; ; NV8 # 4.1 DOUBLED FEMALE SIGN..FUNERAL URN +26B2 ; valid ; ; NV8 # 5.0 NEUTER +26B3..26BC ; valid ; ; NV8 # 5.1 CERES..SESQUIQUADRATE +26BD..26BF ; valid ; ; NV8 # 5.2 SOCCER BALL..SQUARED KEY +26C0..26C3 ; valid ; ; NV8 # 5.1 WHITE DRAUGHTS MAN..BLACK DRAUGHTS KING +26C4..26CD ; valid ; ; NV8 # 5.2 SNOWMAN WITHOUT SNOW..DISABLED CAR +26CE ; valid ; ; NV8 # 6.0 OPHIUCHUS +26CF..26E1 ; valid ; ; NV8 # 5.2 PICK..RESTRICTED LEFT ENTRY-2 +26E2 ; valid ; ; NV8 # 6.0 ASTRONOMICAL SYMBOL FOR URANUS +26E3 ; valid ; ; NV8 # 5.2 HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE +26E4..26E7 ; valid ; ; NV8 # 6.0 PENTAGRAM..INVERTED PENTAGRAM +26E8..26FF ; valid ; ; NV8 # 5.2 BLACK CROSS ON SHIELD..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE +2700 ; valid ; ; NV8 # 7.0 BLACK SAFETY SCISSORS +2701..2704 ; valid ; ; NV8 # 1.1 UPPER BLADE SCISSORS..WHITE SCISSORS +2705 ; valid ; ; NV8 # 
6.0 WHITE HEAVY CHECK MARK +2706..2709 ; valid ; ; NV8 # 1.1 TELEPHONE LOCATION SIGN..ENVELOPE +270A..270B ; valid ; ; NV8 # 6.0 RAISED FIST..RAISED HAND +270C..2727 ; valid ; ; NV8 # 1.1 VICTORY HAND..WHITE FOUR POINTED STAR +2728 ; valid ; ; NV8 # 6.0 SPARKLES +2729..274B ; valid ; ; NV8 # 1.1 STRESS OUTLINED WHITE STAR..HEAVY EIGHT TEARDROP-SPOKED PROPELLER ASTERISK +274C ; valid ; ; NV8 # 6.0 CROSS MARK +274D ; valid ; ; NV8 # 1.1 SHADOWED WHITE CIRCLE +274E ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED CROSS MARK +274F..2752 ; valid ; ; NV8 # 1.1 LOWER RIGHT DROP-SHADOWED WHITE SQUARE..UPPER RIGHT SHADOWED WHITE SQUARE +2753..2755 ; valid ; ; NV8 # 6.0 BLACK QUESTION MARK ORNAMENT..WHITE EXCLAMATION MARK ORNAMENT +2756 ; valid ; ; NV8 # 1.1 BLACK DIAMOND MINUS WHITE X +2757 ; valid ; ; NV8 # 5.2 HEAVY EXCLAMATION MARK SYMBOL +2758..275E ; valid ; ; NV8 # 1.1 LIGHT VERTICAL BAR..HEAVY DOUBLE COMMA QUOTATION MARK ORNAMENT +275F..2760 ; valid ; ; NV8 # 6.0 HEAVY LOW SINGLE COMMA QUOTATION MARK ORNAMENT..HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT +2761..2767 ; valid ; ; NV8 # 1.1 CURVED STEM PARAGRAPH SIGN ORNAMENT..ROTATED FLORAL HEART BULLET +2768..2775 ; valid ; ; NV8 # 3.2 MEDIUM LEFT PARENTHESIS ORNAMENT..MEDIUM RIGHT CURLY BRACKET ORNAMENT +2776..2794 ; valid ; ; NV8 # 1.1 DINGBAT NEGATIVE CIRCLED DIGIT ONE..HEAVY WIDE-HEADED RIGHTWARDS ARROW +2795..2797 ; valid ; ; NV8 # 6.0 HEAVY PLUS SIGN..HEAVY DIVISION SIGN +2798..27AF ; valid ; ; NV8 # 1.1 HEAVY SOUTH EAST ARROW..NOTCHED LOWER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW +27B0 ; valid ; ; NV8 # 6.0 CURLY LOOP +27B1..27BE ; valid ; ; NV8 # 1.1 NOTCHED UPPER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW..OPEN-OUTLINED RIGHTWARDS ARROW +27BF ; valid ; ; NV8 # 6.0 DOUBLE CURLY LOOP +27C0..27C6 ; valid ; ; NV8 # 4.1 THREE DIMENSIONAL ANGLE..RIGHT S-SHAPED BAG DELIMITER +27C7..27CA ; valid ; ; NV8 # 5.0 OR WITH DOT INSIDE..VERTICAL BAR WITH HORIZONTAL STROKE +27CB ; valid ; ; NV8 # 6.1 MATHEMATICAL RISING DIAGONAL +27CC ; valid ; ; NV8 # 5.1 LONG DIVISION +27CD ; valid ; ; NV8 # 6.1 MATHEMATICAL FALLING DIAGONAL +27CE..27CF ; valid ; ; NV8 # 6.0 SQUARED LOGICAL AND..SQUARED LOGICAL OR +27D0..27EB ; valid ; ; NV8 # 3.2 WHITE DIAMOND WITH CENTRED DOT..MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET +27EC..27EF ; valid ; ; NV8 # 5.1 MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET..MATHEMATICAL RIGHT FLATTENED PARENTHESIS +27F0..27FF ; valid ; ; NV8 # 3.2 UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW +2800..28FF ; valid ; ; NV8 # 3.0 BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678 +2900..2A0B ; valid ; ; NV8 # 3.2 RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..SUMMATION WITH INTEGRAL +2A0C ; mapped ; 222B 222B 222B 222B #3.2 QUADRUPLE INTEGRAL OPERATOR +2A0D..2A73 ; valid ; ; NV8 # 3.2 FINITE PART INTEGRAL..EQUALS SIGN ABOVE TILDE OPERATOR +2A74 ; disallowed_STD3_mapped ; 003A 003A 003D #3.2 DOUBLE COLON EQUAL +2A75 ; disallowed_STD3_mapped ; 003D 003D # 3.2 TWO CONSECUTIVE EQUALS SIGNS +2A76 ; disallowed_STD3_mapped ; 003D 003D 003D #3.2 THREE CONSECUTIVE EQUALS SIGNS +2A77..2ADB ; valid ; ; NV8 # 3.2 EQUALS SIGN WITH TWO DOTS ABOVE AND TWO DOTS BELOW..TRANSVERSAL INTERSECTION +2ADC ; mapped ; 2ADD 0338 # 3.2 FORKING +2ADD..2AFF ; valid ; ; NV8 # 3.2 NONFORKING..N-ARY WHITE VERTICAL BAR +2B00..2B0D ; valid ; ; NV8 # 4.0 NORTH EAST WHITE ARROW..UP DOWN BLACK ARROW +2B0E..2B13 ; valid ; ; NV8 # 4.1 RIGHTWARDS ARROW WITH TIP DOWNWARDS..SQUARE WITH BOTTOM HALF BLACK +2B14..2B1A ; valid ; ; NV8 # 5.0 SQUARE WITH UPPER RIGHT DIAGONAL HALF 
BLACK..DOTTED SQUARE +2B1B..2B1F ; valid ; ; NV8 # 5.1 BLACK LARGE SQUARE..BLACK PENTAGON +2B20..2B23 ; valid ; ; NV8 # 5.0 WHITE PENTAGON..HORIZONTAL BLACK HEXAGON +2B24..2B4C ; valid ; ; NV8 # 5.1 BLACK LARGE CIRCLE..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR +2B4D..2B4F ; valid ; ; NV8 # 7.0 DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..SHORT BACKSLANTED SOUTH ARROW +2B50..2B54 ; valid ; ; NV8 # 5.1 WHITE MEDIUM STAR..WHITE RIGHT-POINTING PENTAGON +2B55..2B59 ; valid ; ; NV8 # 5.2 HEAVY LARGE CIRCLE..HEAVY CIRCLED SALTIRE +2B5A..2B73 ; valid ; ; NV8 # 7.0 SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR +2B74..2B75 ; disallowed # NA .. +2B76..2B95 ; valid ; ; NV8 # 7.0 NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW +2B96..2B97 ; disallowed # NA .. +2B98..2BB9 ; valid ; ; NV8 # 7.0 THREE-D TOP-LIGHTED LEFTWARDS EQUILATERAL ARROWHEAD..UP ARROWHEAD IN A RECTANGLE BOX +2BBA..2BBC ; disallowed # NA .. +2BBD..2BC8 ; valid ; ; NV8 # 7.0 BALLOT BOX WITH LIGHT X..BLACK MEDIUM RIGHT-POINTING TRIANGLE CENTRED +2BC9 ; disallowed # NA +2BCA..2BD1 ; valid ; ; NV8 # 7.0 TOP HALF BLACK CIRCLE..UNCERTAINTY SIGN +2BD2..2BEB ; disallowed # NA .. +2BEC..2BEF ; valid ; ; NV8 # 8.0 LEFTWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS..DOWNWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS +2BF0..2BFF ; disallowed # NA .. +2C00 ; mapped ; 2C30 # 4.1 GLAGOLITIC CAPITAL LETTER AZU +2C01 ; mapped ; 2C31 # 4.1 GLAGOLITIC CAPITAL LETTER BUKY +2C02 ; mapped ; 2C32 # 4.1 GLAGOLITIC CAPITAL LETTER VEDE +2C03 ; mapped ; 2C33 # 4.1 GLAGOLITIC CAPITAL LETTER GLAGOLI +2C04 ; mapped ; 2C34 # 4.1 GLAGOLITIC CAPITAL LETTER DOBRO +2C05 ; mapped ; 2C35 # 4.1 GLAGOLITIC CAPITAL LETTER YESTU +2C06 ; mapped ; 2C36 # 4.1 GLAGOLITIC CAPITAL LETTER ZHIVETE +2C07 ; mapped ; 2C37 # 4.1 GLAGOLITIC CAPITAL LETTER DZELO +2C08 ; mapped ; 2C38 # 4.1 GLAGOLITIC CAPITAL LETTER ZEMLJA +2C09 ; mapped ; 2C39 # 4.1 GLAGOLITIC CAPITAL LETTER IZHE +2C0A ; mapped ; 2C3A # 4.1 GLAGOLITIC CAPITAL LETTER INITIAL IZHE +2C0B ; mapped ; 2C3B # 4.1 GLAGOLITIC CAPITAL LETTER I +2C0C ; mapped ; 2C3C # 4.1 GLAGOLITIC CAPITAL LETTER DJERVI +2C0D ; mapped ; 2C3D # 4.1 GLAGOLITIC CAPITAL LETTER KAKO +2C0E ; mapped ; 2C3E # 4.1 GLAGOLITIC CAPITAL LETTER LJUDIJE +2C0F ; mapped ; 2C3F # 4.1 GLAGOLITIC CAPITAL LETTER MYSLITE +2C10 ; mapped ; 2C40 # 4.1 GLAGOLITIC CAPITAL LETTER NASHI +2C11 ; mapped ; 2C41 # 4.1 GLAGOLITIC CAPITAL LETTER ONU +2C12 ; mapped ; 2C42 # 4.1 GLAGOLITIC CAPITAL LETTER POKOJI +2C13 ; mapped ; 2C43 # 4.1 GLAGOLITIC CAPITAL LETTER RITSI +2C14 ; mapped ; 2C44 # 4.1 GLAGOLITIC CAPITAL LETTER SLOVO +2C15 ; mapped ; 2C45 # 4.1 GLAGOLITIC CAPITAL LETTER TVRIDO +2C16 ; mapped ; 2C46 # 4.1 GLAGOLITIC CAPITAL LETTER UKU +2C17 ; mapped ; 2C47 # 4.1 GLAGOLITIC CAPITAL LETTER FRITU +2C18 ; mapped ; 2C48 # 4.1 GLAGOLITIC CAPITAL LETTER HERU +2C19 ; mapped ; 2C49 # 4.1 GLAGOLITIC CAPITAL LETTER OTU +2C1A ; mapped ; 2C4A # 4.1 GLAGOLITIC CAPITAL LETTER PE +2C1B ; mapped ; 2C4B # 4.1 GLAGOLITIC CAPITAL LETTER SHTA +2C1C ; mapped ; 2C4C # 4.1 GLAGOLITIC CAPITAL LETTER TSI +2C1D ; mapped ; 2C4D # 4.1 GLAGOLITIC CAPITAL LETTER CHRIVI +2C1E ; mapped ; 2C4E # 4.1 GLAGOLITIC CAPITAL LETTER SHA +2C1F ; mapped ; 2C4F # 4.1 GLAGOLITIC CAPITAL LETTER YERU +2C20 ; mapped ; 2C50 # 4.1 GLAGOLITIC CAPITAL LETTER YERI +2C21 ; mapped ; 2C51 # 4.1 GLAGOLITIC CAPITAL LETTER YATI +2C22 ; mapped ; 2C52 # 4.1 GLAGOLITIC CAPITAL LETTER SPIDERY HA +2C23 ; mapped ; 2C53 # 4.1 GLAGOLITIC CAPITAL LETTER YU +2C24 ; mapped ; 2C54 # 4.1 GLAGOLITIC 
CAPITAL LETTER SMALL YUS +2C25 ; mapped ; 2C55 # 4.1 GLAGOLITIC CAPITAL LETTER SMALL YUS WITH TAIL +2C26 ; mapped ; 2C56 # 4.1 GLAGOLITIC CAPITAL LETTER YO +2C27 ; mapped ; 2C57 # 4.1 GLAGOLITIC CAPITAL LETTER IOTATED SMALL YUS +2C28 ; mapped ; 2C58 # 4.1 GLAGOLITIC CAPITAL LETTER BIG YUS +2C29 ; mapped ; 2C59 # 4.1 GLAGOLITIC CAPITAL LETTER IOTATED BIG YUS +2C2A ; mapped ; 2C5A # 4.1 GLAGOLITIC CAPITAL LETTER FITA +2C2B ; mapped ; 2C5B # 4.1 GLAGOLITIC CAPITAL LETTER IZHITSA +2C2C ; mapped ; 2C5C # 4.1 GLAGOLITIC CAPITAL LETTER SHTAPIC +2C2D ; mapped ; 2C5D # 4.1 GLAGOLITIC CAPITAL LETTER TROKUTASTI A +2C2E ; mapped ; 2C5E # 4.1 GLAGOLITIC CAPITAL LETTER LATINATE MYSLITE +2C2F ; disallowed # NA +2C30..2C5E ; valid # 4.1 GLAGOLITIC SMALL LETTER AZU..GLAGOLITIC SMALL LETTER LATINATE MYSLITE +2C5F ; disallowed # NA +2C60 ; mapped ; 2C61 # 5.0 LATIN CAPITAL LETTER L WITH DOUBLE BAR +2C61 ; valid # 5.0 LATIN SMALL LETTER L WITH DOUBLE BAR +2C62 ; mapped ; 026B # 5.0 LATIN CAPITAL LETTER L WITH MIDDLE TILDE +2C63 ; mapped ; 1D7D # 5.0 LATIN CAPITAL LETTER P WITH STROKE +2C64 ; mapped ; 027D # 5.0 LATIN CAPITAL LETTER R WITH TAIL +2C65..2C66 ; valid # 5.0 LATIN SMALL LETTER A WITH STROKE..LATIN SMALL LETTER T WITH DIAGONAL STROKE +2C67 ; mapped ; 2C68 # 5.0 LATIN CAPITAL LETTER H WITH DESCENDER +2C68 ; valid # 5.0 LATIN SMALL LETTER H WITH DESCENDER +2C69 ; mapped ; 2C6A # 5.0 LATIN CAPITAL LETTER K WITH DESCENDER +2C6A ; valid # 5.0 LATIN SMALL LETTER K WITH DESCENDER +2C6B ; mapped ; 2C6C # 5.0 LATIN CAPITAL LETTER Z WITH DESCENDER +2C6C ; valid # 5.0 LATIN SMALL LETTER Z WITH DESCENDER +2C6D ; mapped ; 0251 # 5.1 LATIN CAPITAL LETTER ALPHA +2C6E ; mapped ; 0271 # 5.1 LATIN CAPITAL LETTER M WITH HOOK +2C6F ; mapped ; 0250 # 5.1 LATIN CAPITAL LETTER TURNED A +2C70 ; mapped ; 0252 # 5.2 LATIN CAPITAL LETTER TURNED ALPHA +2C71 ; valid # 5.1 LATIN SMALL LETTER V WITH RIGHT HOOK +2C72 ; mapped ; 2C73 # 5.1 LATIN CAPITAL LETTER W WITH HOOK +2C73 ; valid # 5.1 LATIN SMALL LETTER W WITH HOOK +2C74 ; valid # 5.0 LATIN SMALL LETTER V WITH CURL +2C75 ; mapped ; 2C76 # 5.0 LATIN CAPITAL LETTER HALF H +2C76..2C77 ; valid # 5.0 LATIN SMALL LETTER HALF H..LATIN SMALL LETTER TAILLESS PHI +2C78..2C7B ; valid # 5.1 LATIN SMALL LETTER E WITH NOTCH..LATIN LETTER SMALL CAPITAL TURNED E +2C7C ; mapped ; 006A # 5.1 LATIN SUBSCRIPT SMALL LETTER J +2C7D ; mapped ; 0076 # 5.1 MODIFIER LETTER CAPITAL V +2C7E ; mapped ; 023F # 5.2 LATIN CAPITAL LETTER S WITH SWASH TAIL +2C7F ; mapped ; 0240 # 5.2 LATIN CAPITAL LETTER Z WITH SWASH TAIL +2C80 ; mapped ; 2C81 # 4.1 COPTIC CAPITAL LETTER ALFA +2C81 ; valid # 4.1 COPTIC SMALL LETTER ALFA +2C82 ; mapped ; 2C83 # 4.1 COPTIC CAPITAL LETTER VIDA +2C83 ; valid # 4.1 COPTIC SMALL LETTER VIDA +2C84 ; mapped ; 2C85 # 4.1 COPTIC CAPITAL LETTER GAMMA +2C85 ; valid # 4.1 COPTIC SMALL LETTER GAMMA +2C86 ; mapped ; 2C87 # 4.1 COPTIC CAPITAL LETTER DALDA +2C87 ; valid # 4.1 COPTIC SMALL LETTER DALDA +2C88 ; mapped ; 2C89 # 4.1 COPTIC CAPITAL LETTER EIE +2C89 ; valid # 4.1 COPTIC SMALL LETTER EIE +2C8A ; mapped ; 2C8B # 4.1 COPTIC CAPITAL LETTER SOU +2C8B ; valid # 4.1 COPTIC SMALL LETTER SOU +2C8C ; mapped ; 2C8D # 4.1 COPTIC CAPITAL LETTER ZATA +2C8D ; valid # 4.1 COPTIC SMALL LETTER ZATA +2C8E ; mapped ; 2C8F # 4.1 COPTIC CAPITAL LETTER HATE +2C8F ; valid # 4.1 COPTIC SMALL LETTER HATE +2C90 ; mapped ; 2C91 # 4.1 COPTIC CAPITAL LETTER THETHE +2C91 ; valid # 4.1 COPTIC SMALL LETTER THETHE +2C92 ; mapped ; 2C93 # 4.1 COPTIC CAPITAL LETTER IAUDA +2C93 ; valid # 4.1 COPTIC SMALL LETTER IAUDA 
+2C94 ; mapped ; 2C95 # 4.1 COPTIC CAPITAL LETTER KAPA +2C95 ; valid # 4.1 COPTIC SMALL LETTER KAPA +2C96 ; mapped ; 2C97 # 4.1 COPTIC CAPITAL LETTER LAULA +2C97 ; valid # 4.1 COPTIC SMALL LETTER LAULA +2C98 ; mapped ; 2C99 # 4.1 COPTIC CAPITAL LETTER MI +2C99 ; valid # 4.1 COPTIC SMALL LETTER MI +2C9A ; mapped ; 2C9B # 4.1 COPTIC CAPITAL LETTER NI +2C9B ; valid # 4.1 COPTIC SMALL LETTER NI +2C9C ; mapped ; 2C9D # 4.1 COPTIC CAPITAL LETTER KSI +2C9D ; valid # 4.1 COPTIC SMALL LETTER KSI +2C9E ; mapped ; 2C9F # 4.1 COPTIC CAPITAL LETTER O +2C9F ; valid # 4.1 COPTIC SMALL LETTER O +2CA0 ; mapped ; 2CA1 # 4.1 COPTIC CAPITAL LETTER PI +2CA1 ; valid # 4.1 COPTIC SMALL LETTER PI +2CA2 ; mapped ; 2CA3 # 4.1 COPTIC CAPITAL LETTER RO +2CA3 ; valid # 4.1 COPTIC SMALL LETTER RO +2CA4 ; mapped ; 2CA5 # 4.1 COPTIC CAPITAL LETTER SIMA +2CA5 ; valid # 4.1 COPTIC SMALL LETTER SIMA +2CA6 ; mapped ; 2CA7 # 4.1 COPTIC CAPITAL LETTER TAU +2CA7 ; valid # 4.1 COPTIC SMALL LETTER TAU +2CA8 ; mapped ; 2CA9 # 4.1 COPTIC CAPITAL LETTER UA +2CA9 ; valid # 4.1 COPTIC SMALL LETTER UA +2CAA ; mapped ; 2CAB # 4.1 COPTIC CAPITAL LETTER FI +2CAB ; valid # 4.1 COPTIC SMALL LETTER FI +2CAC ; mapped ; 2CAD # 4.1 COPTIC CAPITAL LETTER KHI +2CAD ; valid # 4.1 COPTIC SMALL LETTER KHI +2CAE ; mapped ; 2CAF # 4.1 COPTIC CAPITAL LETTER PSI +2CAF ; valid # 4.1 COPTIC SMALL LETTER PSI +2CB0 ; mapped ; 2CB1 # 4.1 COPTIC CAPITAL LETTER OOU +2CB1 ; valid # 4.1 COPTIC SMALL LETTER OOU +2CB2 ; mapped ; 2CB3 # 4.1 COPTIC CAPITAL LETTER DIALECT-P ALEF +2CB3 ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P ALEF +2CB4 ; mapped ; 2CB5 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC AIN +2CB5 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC AIN +2CB6 ; mapped ; 2CB7 # 4.1 COPTIC CAPITAL LETTER CRYPTOGRAMMIC EIE +2CB7 ; valid # 4.1 COPTIC SMALL LETTER CRYPTOGRAMMIC EIE +2CB8 ; mapped ; 2CB9 # 4.1 COPTIC CAPITAL LETTER DIALECT-P KAPA +2CB9 ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P KAPA +2CBA ; mapped ; 2CBB # 4.1 COPTIC CAPITAL LETTER DIALECT-P NI +2CBB ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P NI +2CBC ; mapped ; 2CBD # 4.1 COPTIC CAPITAL LETTER CRYPTOGRAMMIC NI +2CBD ; valid # 4.1 COPTIC SMALL LETTER CRYPTOGRAMMIC NI +2CBE ; mapped ; 2CBF # 4.1 COPTIC CAPITAL LETTER OLD COPTIC OOU +2CBF ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC OOU +2CC0 ; mapped ; 2CC1 # 4.1 COPTIC CAPITAL LETTER SAMPI +2CC1 ; valid # 4.1 COPTIC SMALL LETTER SAMPI +2CC2 ; mapped ; 2CC3 # 4.1 COPTIC CAPITAL LETTER CROSSED SHEI +2CC3 ; valid # 4.1 COPTIC SMALL LETTER CROSSED SHEI +2CC4 ; mapped ; 2CC5 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC SHEI +2CC5 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC SHEI +2CC6 ; mapped ; 2CC7 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC ESH +2CC7 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC ESH +2CC8 ; mapped ; 2CC9 # 4.1 COPTIC CAPITAL LETTER AKHMIMIC KHEI +2CC9 ; valid # 4.1 COPTIC SMALL LETTER AKHMIMIC KHEI +2CCA ; mapped ; 2CCB # 4.1 COPTIC CAPITAL LETTER DIALECT-P HORI +2CCB ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P HORI +2CCC ; mapped ; 2CCD # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HORI +2CCD ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HORI +2CCE ; mapped ; 2CCF # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HA +2CCF ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HA +2CD0 ; mapped ; 2CD1 # 4.1 COPTIC CAPITAL LETTER L-SHAPED HA +2CD1 ; valid # 4.1 COPTIC SMALL LETTER L-SHAPED HA +2CD2 ; mapped ; 2CD3 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HEI +2CD3 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HEI +2CD4 ; mapped ; 2CD5 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HAT +2CD5 ; 
valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HAT +2CD6 ; mapped ; 2CD7 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC GANGIA +2CD7 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC GANGIA +2CD8 ; mapped ; 2CD9 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC DJA +2CD9 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC DJA +2CDA ; mapped ; 2CDB # 4.1 COPTIC CAPITAL LETTER OLD COPTIC SHIMA +2CDB ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC SHIMA +2CDC ; mapped ; 2CDD # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN SHIMA +2CDD ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN SHIMA +2CDE ; mapped ; 2CDF # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN NGI +2CDF ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN NGI +2CE0 ; mapped ; 2CE1 # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN NYI +2CE1 ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN NYI +2CE2 ; mapped ; 2CE3 # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN WAU +2CE3..2CE4 ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN WAU..COPTIC SYMBOL KAI +2CE5..2CEA ; valid ; ; NV8 # 4.1 COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA +2CEB ; mapped ; 2CEC # 5.2 COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI +2CEC ; valid # 5.2 COPTIC SMALL LETTER CRYPTOGRAMMIC SHEI +2CED ; mapped ; 2CEE # 5.2 COPTIC CAPITAL LETTER CRYPTOGRAMMIC GANGIA +2CEE..2CF1 ; valid # 5.2 COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA..COPTIC COMBINING SPIRITUS LENIS +2CF2 ; mapped ; 2CF3 # 6.1 COPTIC CAPITAL LETTER BOHAIRIC KHEI +2CF3 ; valid # 6.1 COPTIC SMALL LETTER BOHAIRIC KHEI +2CF4..2CF8 ; disallowed # NA .. +2CF9..2CFF ; valid ; ; NV8 # 4.1 COPTIC OLD NUBIAN FULL STOP..COPTIC MORPHOLOGICAL DIVIDER +2D00..2D25 ; valid # 4.1 GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE +2D26 ; disallowed # NA +2D27 ; valid # 6.1 GEORGIAN SMALL LETTER YN +2D28..2D2C ; disallowed # NA .. +2D2D ; valid # 6.1 GEORGIAN SMALL LETTER AEN +2D2E..2D2F ; disallowed # NA .. +2D30..2D65 ; valid # 4.1 TIFINAGH LETTER YA..TIFINAGH LETTER YAZZ +2D66..2D67 ; valid # 6.1 TIFINAGH LETTER YE..TIFINAGH LETTER YO +2D68..2D6E ; disallowed # NA .. +2D6F ; mapped ; 2D61 # 4.1 TIFINAGH MODIFIER LETTER LABIALIZATION MARK +2D70 ; valid ; ; NV8 # 6.0 TIFINAGH SEPARATOR MARK +2D71..2D7E ; disallowed # NA .. +2D7F ; valid # 6.0 TIFINAGH CONSONANT JOINER +2D80..2D96 ; valid # 4.1 ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE +2D97..2D9F ; disallowed # NA .. 
+2DA0..2DA6 ; valid # 4.1 ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO +2DA7 ; disallowed # NA +2DA8..2DAE ; valid # 4.1 ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO +2DAF ; disallowed # NA +2DB0..2DB6 ; valid # 4.1 ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO +2DB7 ; disallowed # NA +2DB8..2DBE ; valid # 4.1 ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO +2DBF ; disallowed # NA +2DC0..2DC6 ; valid # 4.1 ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO +2DC7 ; disallowed # NA +2DC8..2DCE ; valid # 4.1 ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO +2DCF ; disallowed # NA +2DD0..2DD6 ; valid # 4.1 ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO +2DD7 ; disallowed # NA +2DD8..2DDE ; valid # 4.1 ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO +2DDF ; disallowed # NA +2DE0..2DFF ; valid # 5.1 COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS +2E00..2E17 ; valid ; ; NV8 # 4.1 RIGHT ANGLE SUBSTITUTION MARKER..DOUBLE OBLIQUE HYPHEN +2E18..2E1B ; valid ; ; NV8 # 5.1 INVERTED INTERROBANG..TILDE WITH RING ABOVE +2E1C..2E1D ; valid ; ; NV8 # 4.1 LEFT LOW PARAPHRASE BRACKET..RIGHT LOW PARAPHRASE BRACKET +2E1E..2E2E ; valid ; ; NV8 # 5.1 TILDE WITH DOT ABOVE..REVERSED QUESTION MARK +2E2F ; valid # 5.1 VERTICAL TILDE +2E30 ; valid ; ; NV8 # 5.1 RING POINT +2E31 ; valid ; ; NV8 # 5.2 WORD SEPARATOR MIDDLE DOT +2E32..2E3B ; valid ; ; NV8 # 6.1 TURNED COMMA..THREE-EM DASH +2E3C..2E42 ; valid ; ; NV8 # 7.0 STENOGRAPHIC FULL STOP..DOUBLE LOW-REVERSED-9 QUOTATION MARK +2E43..2E44 ; valid ; ; NV8 # 9.0 DASH WITH LEFT UPTURN..DOUBLE SUSPENSION MARK +2E45..2E7F ; disallowed # NA .. +2E80..2E99 ; valid ; ; NV8 # 3.0 CJK RADICAL REPEAT..CJK RADICAL RAP +2E9A ; disallowed # NA +2E9B..2E9E ; valid ; ; NV8 # 3.0 CJK RADICAL CHOKE..CJK RADICAL DEATH +2E9F ; mapped ; 6BCD # 3.0 CJK RADICAL MOTHER +2EA0..2EF2 ; valid ; ; NV8 # 3.0 CJK RADICAL CIVILIAN..CJK RADICAL J-SIMPLIFIED TURTLE +2EF3 ; mapped ; 9F9F # 3.0 CJK RADICAL C-SIMPLIFIED TURTLE +2EF4..2EFF ; disallowed # NA .. 
+2F00 ; mapped ; 4E00 # 3.0 KANGXI RADICAL ONE +2F01 ; mapped ; 4E28 # 3.0 KANGXI RADICAL LINE +2F02 ; mapped ; 4E36 # 3.0 KANGXI RADICAL DOT +2F03 ; mapped ; 4E3F # 3.0 KANGXI RADICAL SLASH +2F04 ; mapped ; 4E59 # 3.0 KANGXI RADICAL SECOND +2F05 ; mapped ; 4E85 # 3.0 KANGXI RADICAL HOOK +2F06 ; mapped ; 4E8C # 3.0 KANGXI RADICAL TWO +2F07 ; mapped ; 4EA0 # 3.0 KANGXI RADICAL LID +2F08 ; mapped ; 4EBA # 3.0 KANGXI RADICAL MAN +2F09 ; mapped ; 513F # 3.0 KANGXI RADICAL LEGS +2F0A ; mapped ; 5165 # 3.0 KANGXI RADICAL ENTER +2F0B ; mapped ; 516B # 3.0 KANGXI RADICAL EIGHT +2F0C ; mapped ; 5182 # 3.0 KANGXI RADICAL DOWN BOX +2F0D ; mapped ; 5196 # 3.0 KANGXI RADICAL COVER +2F0E ; mapped ; 51AB # 3.0 KANGXI RADICAL ICE +2F0F ; mapped ; 51E0 # 3.0 KANGXI RADICAL TABLE +2F10 ; mapped ; 51F5 # 3.0 KANGXI RADICAL OPEN BOX +2F11 ; mapped ; 5200 # 3.0 KANGXI RADICAL KNIFE +2F12 ; mapped ; 529B # 3.0 KANGXI RADICAL POWER +2F13 ; mapped ; 52F9 # 3.0 KANGXI RADICAL WRAP +2F14 ; mapped ; 5315 # 3.0 KANGXI RADICAL SPOON +2F15 ; mapped ; 531A # 3.0 KANGXI RADICAL RIGHT OPEN BOX +2F16 ; mapped ; 5338 # 3.0 KANGXI RADICAL HIDING ENCLOSURE +2F17 ; mapped ; 5341 # 3.0 KANGXI RADICAL TEN +2F18 ; mapped ; 535C # 3.0 KANGXI RADICAL DIVINATION +2F19 ; mapped ; 5369 # 3.0 KANGXI RADICAL SEAL +2F1A ; mapped ; 5382 # 3.0 KANGXI RADICAL CLIFF +2F1B ; mapped ; 53B6 # 3.0 KANGXI RADICAL PRIVATE +2F1C ; mapped ; 53C8 # 3.0 KANGXI RADICAL AGAIN +2F1D ; mapped ; 53E3 # 3.0 KANGXI RADICAL MOUTH +2F1E ; mapped ; 56D7 # 3.0 KANGXI RADICAL ENCLOSURE +2F1F ; mapped ; 571F # 3.0 KANGXI RADICAL EARTH +2F20 ; mapped ; 58EB # 3.0 KANGXI RADICAL SCHOLAR +2F21 ; mapped ; 5902 # 3.0 KANGXI RADICAL GO +2F22 ; mapped ; 590A # 3.0 KANGXI RADICAL GO SLOWLY +2F23 ; mapped ; 5915 # 3.0 KANGXI RADICAL EVENING +2F24 ; mapped ; 5927 # 3.0 KANGXI RADICAL BIG +2F25 ; mapped ; 5973 # 3.0 KANGXI RADICAL WOMAN +2F26 ; mapped ; 5B50 # 3.0 KANGXI RADICAL CHILD +2F27 ; mapped ; 5B80 # 3.0 KANGXI RADICAL ROOF +2F28 ; mapped ; 5BF8 # 3.0 KANGXI RADICAL INCH +2F29 ; mapped ; 5C0F # 3.0 KANGXI RADICAL SMALL +2F2A ; mapped ; 5C22 # 3.0 KANGXI RADICAL LAME +2F2B ; mapped ; 5C38 # 3.0 KANGXI RADICAL CORPSE +2F2C ; mapped ; 5C6E # 3.0 KANGXI RADICAL SPROUT +2F2D ; mapped ; 5C71 # 3.0 KANGXI RADICAL MOUNTAIN +2F2E ; mapped ; 5DDB # 3.0 KANGXI RADICAL RIVER +2F2F ; mapped ; 5DE5 # 3.0 KANGXI RADICAL WORK +2F30 ; mapped ; 5DF1 # 3.0 KANGXI RADICAL ONESELF +2F31 ; mapped ; 5DFE # 3.0 KANGXI RADICAL TURBAN +2F32 ; mapped ; 5E72 # 3.0 KANGXI RADICAL DRY +2F33 ; mapped ; 5E7A # 3.0 KANGXI RADICAL SHORT THREAD +2F34 ; mapped ; 5E7F # 3.0 KANGXI RADICAL DOTTED CLIFF +2F35 ; mapped ; 5EF4 # 3.0 KANGXI RADICAL LONG STRIDE +2F36 ; mapped ; 5EFE # 3.0 KANGXI RADICAL TWO HANDS +2F37 ; mapped ; 5F0B # 3.0 KANGXI RADICAL SHOOT +2F38 ; mapped ; 5F13 # 3.0 KANGXI RADICAL BOW +2F39 ; mapped ; 5F50 # 3.0 KANGXI RADICAL SNOUT +2F3A ; mapped ; 5F61 # 3.0 KANGXI RADICAL BRISTLE +2F3B ; mapped ; 5F73 # 3.0 KANGXI RADICAL STEP +2F3C ; mapped ; 5FC3 # 3.0 KANGXI RADICAL HEART +2F3D ; mapped ; 6208 # 3.0 KANGXI RADICAL HALBERD +2F3E ; mapped ; 6236 # 3.0 KANGXI RADICAL DOOR +2F3F ; mapped ; 624B # 3.0 KANGXI RADICAL HAND +2F40 ; mapped ; 652F # 3.0 KANGXI RADICAL BRANCH +2F41 ; mapped ; 6534 # 3.0 KANGXI RADICAL RAP +2F42 ; mapped ; 6587 # 3.0 KANGXI RADICAL SCRIPT +2F43 ; mapped ; 6597 # 3.0 KANGXI RADICAL DIPPER +2F44 ; mapped ; 65A4 # 3.0 KANGXI RADICAL AXE +2F45 ; mapped ; 65B9 # 3.0 KANGXI RADICAL SQUARE +2F46 ; mapped ; 65E0 # 3.0 KANGXI RADICAL NOT +2F47 ; mapped ; 65E5 # 3.0 
KANGXI RADICAL SUN +2F48 ; mapped ; 66F0 # 3.0 KANGXI RADICAL SAY +2F49 ; mapped ; 6708 # 3.0 KANGXI RADICAL MOON +2F4A ; mapped ; 6728 # 3.0 KANGXI RADICAL TREE +2F4B ; mapped ; 6B20 # 3.0 KANGXI RADICAL LACK +2F4C ; mapped ; 6B62 # 3.0 KANGXI RADICAL STOP +2F4D ; mapped ; 6B79 # 3.0 KANGXI RADICAL DEATH +2F4E ; mapped ; 6BB3 # 3.0 KANGXI RADICAL WEAPON +2F4F ; mapped ; 6BCB # 3.0 KANGXI RADICAL DO NOT +2F50 ; mapped ; 6BD4 # 3.0 KANGXI RADICAL COMPARE +2F51 ; mapped ; 6BDB # 3.0 KANGXI RADICAL FUR +2F52 ; mapped ; 6C0F # 3.0 KANGXI RADICAL CLAN +2F53 ; mapped ; 6C14 # 3.0 KANGXI RADICAL STEAM +2F54 ; mapped ; 6C34 # 3.0 KANGXI RADICAL WATER +2F55 ; mapped ; 706B # 3.0 KANGXI RADICAL FIRE +2F56 ; mapped ; 722A # 3.0 KANGXI RADICAL CLAW +2F57 ; mapped ; 7236 # 3.0 KANGXI RADICAL FATHER +2F58 ; mapped ; 723B # 3.0 KANGXI RADICAL DOUBLE X +2F59 ; mapped ; 723F # 3.0 KANGXI RADICAL HALF TREE TRUNK +2F5A ; mapped ; 7247 # 3.0 KANGXI RADICAL SLICE +2F5B ; mapped ; 7259 # 3.0 KANGXI RADICAL FANG +2F5C ; mapped ; 725B # 3.0 KANGXI RADICAL COW +2F5D ; mapped ; 72AC # 3.0 KANGXI RADICAL DOG +2F5E ; mapped ; 7384 # 3.0 KANGXI RADICAL PROFOUND +2F5F ; mapped ; 7389 # 3.0 KANGXI RADICAL JADE +2F60 ; mapped ; 74DC # 3.0 KANGXI RADICAL MELON +2F61 ; mapped ; 74E6 # 3.0 KANGXI RADICAL TILE +2F62 ; mapped ; 7518 # 3.0 KANGXI RADICAL SWEET +2F63 ; mapped ; 751F # 3.0 KANGXI RADICAL LIFE +2F64 ; mapped ; 7528 # 3.0 KANGXI RADICAL USE +2F65 ; mapped ; 7530 # 3.0 KANGXI RADICAL FIELD +2F66 ; mapped ; 758B # 3.0 KANGXI RADICAL BOLT OF CLOTH +2F67 ; mapped ; 7592 # 3.0 KANGXI RADICAL SICKNESS +2F68 ; mapped ; 7676 # 3.0 KANGXI RADICAL DOTTED TENT +2F69 ; mapped ; 767D # 3.0 KANGXI RADICAL WHITE +2F6A ; mapped ; 76AE # 3.0 KANGXI RADICAL SKIN +2F6B ; mapped ; 76BF # 3.0 KANGXI RADICAL DISH +2F6C ; mapped ; 76EE # 3.0 KANGXI RADICAL EYE +2F6D ; mapped ; 77DB # 3.0 KANGXI RADICAL SPEAR +2F6E ; mapped ; 77E2 # 3.0 KANGXI RADICAL ARROW +2F6F ; mapped ; 77F3 # 3.0 KANGXI RADICAL STONE +2F70 ; mapped ; 793A # 3.0 KANGXI RADICAL SPIRIT +2F71 ; mapped ; 79B8 # 3.0 KANGXI RADICAL TRACK +2F72 ; mapped ; 79BE # 3.0 KANGXI RADICAL GRAIN +2F73 ; mapped ; 7A74 # 3.0 KANGXI RADICAL CAVE +2F74 ; mapped ; 7ACB # 3.0 KANGXI RADICAL STAND +2F75 ; mapped ; 7AF9 # 3.0 KANGXI RADICAL BAMBOO +2F76 ; mapped ; 7C73 # 3.0 KANGXI RADICAL RICE +2F77 ; mapped ; 7CF8 # 3.0 KANGXI RADICAL SILK +2F78 ; mapped ; 7F36 # 3.0 KANGXI RADICAL JAR +2F79 ; mapped ; 7F51 # 3.0 KANGXI RADICAL NET +2F7A ; mapped ; 7F8A # 3.0 KANGXI RADICAL SHEEP +2F7B ; mapped ; 7FBD # 3.0 KANGXI RADICAL FEATHER +2F7C ; mapped ; 8001 # 3.0 KANGXI RADICAL OLD +2F7D ; mapped ; 800C # 3.0 KANGXI RADICAL AND +2F7E ; mapped ; 8012 # 3.0 KANGXI RADICAL PLOW +2F7F ; mapped ; 8033 # 3.0 KANGXI RADICAL EAR +2F80 ; mapped ; 807F # 3.0 KANGXI RADICAL BRUSH +2F81 ; mapped ; 8089 # 3.0 KANGXI RADICAL MEAT +2F82 ; mapped ; 81E3 # 3.0 KANGXI RADICAL MINISTER +2F83 ; mapped ; 81EA # 3.0 KANGXI RADICAL SELF +2F84 ; mapped ; 81F3 # 3.0 KANGXI RADICAL ARRIVE +2F85 ; mapped ; 81FC # 3.0 KANGXI RADICAL MORTAR +2F86 ; mapped ; 820C # 3.0 KANGXI RADICAL TONGUE +2F87 ; mapped ; 821B # 3.0 KANGXI RADICAL OPPOSE +2F88 ; mapped ; 821F # 3.0 KANGXI RADICAL BOAT +2F89 ; mapped ; 826E # 3.0 KANGXI RADICAL STOPPING +2F8A ; mapped ; 8272 # 3.0 KANGXI RADICAL COLOR +2F8B ; mapped ; 8278 # 3.0 KANGXI RADICAL GRASS +2F8C ; mapped ; 864D # 3.0 KANGXI RADICAL TIGER +2F8D ; mapped ; 866B # 3.0 KANGXI RADICAL INSECT +2F8E ; mapped ; 8840 # 3.0 KANGXI RADICAL BLOOD +2F8F ; mapped ; 884C # 3.0 KANGXI RADICAL 
WALK ENCLOSURE +2F90 ; mapped ; 8863 # 3.0 KANGXI RADICAL CLOTHES +2F91 ; mapped ; 897E # 3.0 KANGXI RADICAL WEST +2F92 ; mapped ; 898B # 3.0 KANGXI RADICAL SEE +2F93 ; mapped ; 89D2 # 3.0 KANGXI RADICAL HORN +2F94 ; mapped ; 8A00 # 3.0 KANGXI RADICAL SPEECH +2F95 ; mapped ; 8C37 # 3.0 KANGXI RADICAL VALLEY +2F96 ; mapped ; 8C46 # 3.0 KANGXI RADICAL BEAN +2F97 ; mapped ; 8C55 # 3.0 KANGXI RADICAL PIG +2F98 ; mapped ; 8C78 # 3.0 KANGXI RADICAL BADGER +2F99 ; mapped ; 8C9D # 3.0 KANGXI RADICAL SHELL +2F9A ; mapped ; 8D64 # 3.0 KANGXI RADICAL RED +2F9B ; mapped ; 8D70 # 3.0 KANGXI RADICAL RUN +2F9C ; mapped ; 8DB3 # 3.0 KANGXI RADICAL FOOT +2F9D ; mapped ; 8EAB # 3.0 KANGXI RADICAL BODY +2F9E ; mapped ; 8ECA # 3.0 KANGXI RADICAL CART +2F9F ; mapped ; 8F9B # 3.0 KANGXI RADICAL BITTER +2FA0 ; mapped ; 8FB0 # 3.0 KANGXI RADICAL MORNING +2FA1 ; mapped ; 8FB5 # 3.0 KANGXI RADICAL WALK +2FA2 ; mapped ; 9091 # 3.0 KANGXI RADICAL CITY +2FA3 ; mapped ; 9149 # 3.0 KANGXI RADICAL WINE +2FA4 ; mapped ; 91C6 # 3.0 KANGXI RADICAL DISTINGUISH +2FA5 ; mapped ; 91CC # 3.0 KANGXI RADICAL VILLAGE +2FA6 ; mapped ; 91D1 # 3.0 KANGXI RADICAL GOLD +2FA7 ; mapped ; 9577 # 3.0 KANGXI RADICAL LONG +2FA8 ; mapped ; 9580 # 3.0 KANGXI RADICAL GATE +2FA9 ; mapped ; 961C # 3.0 KANGXI RADICAL MOUND +2FAA ; mapped ; 96B6 # 3.0 KANGXI RADICAL SLAVE +2FAB ; mapped ; 96B9 # 3.0 KANGXI RADICAL SHORT TAILED BIRD +2FAC ; mapped ; 96E8 # 3.0 KANGXI RADICAL RAIN +2FAD ; mapped ; 9751 # 3.0 KANGXI RADICAL BLUE +2FAE ; mapped ; 975E # 3.0 KANGXI RADICAL WRONG +2FAF ; mapped ; 9762 # 3.0 KANGXI RADICAL FACE +2FB0 ; mapped ; 9769 # 3.0 KANGXI RADICAL LEATHER +2FB1 ; mapped ; 97CB # 3.0 KANGXI RADICAL TANNED LEATHER +2FB2 ; mapped ; 97ED # 3.0 KANGXI RADICAL LEEK +2FB3 ; mapped ; 97F3 # 3.0 KANGXI RADICAL SOUND +2FB4 ; mapped ; 9801 # 3.0 KANGXI RADICAL LEAF +2FB5 ; mapped ; 98A8 # 3.0 KANGXI RADICAL WIND +2FB6 ; mapped ; 98DB # 3.0 KANGXI RADICAL FLY +2FB7 ; mapped ; 98DF # 3.0 KANGXI RADICAL EAT +2FB8 ; mapped ; 9996 # 3.0 KANGXI RADICAL HEAD +2FB9 ; mapped ; 9999 # 3.0 KANGXI RADICAL FRAGRANT +2FBA ; mapped ; 99AC # 3.0 KANGXI RADICAL HORSE +2FBB ; mapped ; 9AA8 # 3.0 KANGXI RADICAL BONE +2FBC ; mapped ; 9AD8 # 3.0 KANGXI RADICAL TALL +2FBD ; mapped ; 9ADF # 3.0 KANGXI RADICAL HAIR +2FBE ; mapped ; 9B25 # 3.0 KANGXI RADICAL FIGHT +2FBF ; mapped ; 9B2F # 3.0 KANGXI RADICAL SACRIFICIAL WINE +2FC0 ; mapped ; 9B32 # 3.0 KANGXI RADICAL CAULDRON +2FC1 ; mapped ; 9B3C # 3.0 KANGXI RADICAL GHOST +2FC2 ; mapped ; 9B5A # 3.0 KANGXI RADICAL FISH +2FC3 ; mapped ; 9CE5 # 3.0 KANGXI RADICAL BIRD +2FC4 ; mapped ; 9E75 # 3.0 KANGXI RADICAL SALT +2FC5 ; mapped ; 9E7F # 3.0 KANGXI RADICAL DEER +2FC6 ; mapped ; 9EA5 # 3.0 KANGXI RADICAL WHEAT +2FC7 ; mapped ; 9EBB # 3.0 KANGXI RADICAL HEMP +2FC8 ; mapped ; 9EC3 # 3.0 KANGXI RADICAL YELLOW +2FC9 ; mapped ; 9ECD # 3.0 KANGXI RADICAL MILLET +2FCA ; mapped ; 9ED1 # 3.0 KANGXI RADICAL BLACK +2FCB ; mapped ; 9EF9 # 3.0 KANGXI RADICAL EMBROIDERY +2FCC ; mapped ; 9EFD # 3.0 KANGXI RADICAL FROG +2FCD ; mapped ; 9F0E # 3.0 KANGXI RADICAL TRIPOD +2FCE ; mapped ; 9F13 # 3.0 KANGXI RADICAL DRUM +2FCF ; mapped ; 9F20 # 3.0 KANGXI RADICAL RAT +2FD0 ; mapped ; 9F3B # 3.0 KANGXI RADICAL NOSE +2FD1 ; mapped ; 9F4A # 3.0 KANGXI RADICAL EVEN +2FD2 ; mapped ; 9F52 # 3.0 KANGXI RADICAL TOOTH +2FD3 ; mapped ; 9F8D # 3.0 KANGXI RADICAL DRAGON +2FD4 ; mapped ; 9F9C # 3.0 KANGXI RADICAL TURTLE +2FD5 ; mapped ; 9FA0 # 3.0 KANGXI RADICAL FLUTE +2FD6..2FEF ; disallowed # NA .. 
+2FF0..2FFB ; disallowed # 3.0 IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID +2FFC..2FFF ; disallowed # NA .. +3000 ; disallowed_STD3_mapped ; 0020 # 1.1 IDEOGRAPHIC SPACE +3001 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC COMMA +3002 ; mapped ; 002E # 1.1 IDEOGRAPHIC FULL STOP +3003..3004 ; valid ; ; NV8 # 1.1 DITTO MARK..JAPANESE INDUSTRIAL STANDARD SYMBOL +3005..3007 ; valid # 1.1 IDEOGRAPHIC ITERATION MARK..IDEOGRAPHIC NUMBER ZERO +3008..3029 ; valid ; ; NV8 # 1.1 LEFT ANGLE BRACKET..HANGZHOU NUMERAL NINE +302A..302D ; valid # 1.1 IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK +302E..3035 ; valid ; ; NV8 # 1.1 HANGUL SINGLE DOT TONE MARK..VERTICAL KANA REPEAT MARK LOWER HALF +3036 ; mapped ; 3012 # 1.1 CIRCLED POSTAL MARK +3037 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL +3038 ; mapped ; 5341 # 3.0 HANGZHOU NUMERAL TEN +3039 ; mapped ; 5344 # 3.0 HANGZHOU NUMERAL TWENTY +303A ; mapped ; 5345 # 3.0 HANGZHOU NUMERAL THIRTY +303B ; valid ; ; NV8 # 3.2 VERTICAL IDEOGRAPHIC ITERATION MARK +303C ; valid # 3.2 MASU MARK +303D ; valid ; ; NV8 # 3.2 PART ALTERNATION MARK +303E ; valid ; ; NV8 # 3.0 IDEOGRAPHIC VARIATION INDICATOR +303F ; valid ; ; NV8 # 1.1 IDEOGRAPHIC HALF FILL SPACE +3040 ; disallowed # NA +3041..3094 ; valid # 1.1 HIRAGANA LETTER SMALL A..HIRAGANA LETTER VU +3095..3096 ; valid # 3.2 HIRAGANA LETTER SMALL KA..HIRAGANA LETTER SMALL KE +3097..3098 ; disallowed # NA .. +3099..309A ; valid # 1.1 COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK +309B ; disallowed_STD3_mapped ; 0020 3099 # 1.1 KATAKANA-HIRAGANA VOICED SOUND MARK +309C ; disallowed_STD3_mapped ; 0020 309A # 1.1 KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK +309D..309E ; valid # 1.1 HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK +309F ; mapped ; 3088 308A # 3.2 HIRAGANA DIGRAPH YORI +30A0 ; valid ; ; NV8 # 3.2 KATAKANA-HIRAGANA DOUBLE HYPHEN +30A1..30FE ; valid # 1.1 KATAKANA LETTER SMALL A..KATAKANA VOICED ITERATION MARK +30FF ; mapped ; 30B3 30C8 # 3.2 KATAKANA DIGRAPH KOTO +3100..3104 ; disallowed # NA .. +3105..312C ; valid # 1.1 BOPOMOFO LETTER B..BOPOMOFO LETTER GN +312D ; valid # 5.1 BOPOMOFO LETTER IH +312E..3130 ; disallowed # NA .. 
+3131 ; mapped ; 1100 # 1.1 HANGUL LETTER KIYEOK +3132 ; mapped ; 1101 # 1.1 HANGUL LETTER SSANGKIYEOK +3133 ; mapped ; 11AA # 1.1 HANGUL LETTER KIYEOK-SIOS +3134 ; mapped ; 1102 # 1.1 HANGUL LETTER NIEUN +3135 ; mapped ; 11AC # 1.1 HANGUL LETTER NIEUN-CIEUC +3136 ; mapped ; 11AD # 1.1 HANGUL LETTER NIEUN-HIEUH +3137 ; mapped ; 1103 # 1.1 HANGUL LETTER TIKEUT +3138 ; mapped ; 1104 # 1.1 HANGUL LETTER SSANGTIKEUT +3139 ; mapped ; 1105 # 1.1 HANGUL LETTER RIEUL +313A ; mapped ; 11B0 # 1.1 HANGUL LETTER RIEUL-KIYEOK +313B ; mapped ; 11B1 # 1.1 HANGUL LETTER RIEUL-MIEUM +313C ; mapped ; 11B2 # 1.1 HANGUL LETTER RIEUL-PIEUP +313D ; mapped ; 11B3 # 1.1 HANGUL LETTER RIEUL-SIOS +313E ; mapped ; 11B4 # 1.1 HANGUL LETTER RIEUL-THIEUTH +313F ; mapped ; 11B5 # 1.1 HANGUL LETTER RIEUL-PHIEUPH +3140 ; mapped ; 111A # 1.1 HANGUL LETTER RIEUL-HIEUH +3141 ; mapped ; 1106 # 1.1 HANGUL LETTER MIEUM +3142 ; mapped ; 1107 # 1.1 HANGUL LETTER PIEUP +3143 ; mapped ; 1108 # 1.1 HANGUL LETTER SSANGPIEUP +3144 ; mapped ; 1121 # 1.1 HANGUL LETTER PIEUP-SIOS +3145 ; mapped ; 1109 # 1.1 HANGUL LETTER SIOS +3146 ; mapped ; 110A # 1.1 HANGUL LETTER SSANGSIOS +3147 ; mapped ; 110B # 1.1 HANGUL LETTER IEUNG +3148 ; mapped ; 110C # 1.1 HANGUL LETTER CIEUC +3149 ; mapped ; 110D # 1.1 HANGUL LETTER SSANGCIEUC +314A ; mapped ; 110E # 1.1 HANGUL LETTER CHIEUCH +314B ; mapped ; 110F # 1.1 HANGUL LETTER KHIEUKH +314C ; mapped ; 1110 # 1.1 HANGUL LETTER THIEUTH +314D ; mapped ; 1111 # 1.1 HANGUL LETTER PHIEUPH +314E ; mapped ; 1112 # 1.1 HANGUL LETTER HIEUH +314F ; mapped ; 1161 # 1.1 HANGUL LETTER A +3150 ; mapped ; 1162 # 1.1 HANGUL LETTER AE +3151 ; mapped ; 1163 # 1.1 HANGUL LETTER YA +3152 ; mapped ; 1164 # 1.1 HANGUL LETTER YAE +3153 ; mapped ; 1165 # 1.1 HANGUL LETTER EO +3154 ; mapped ; 1166 # 1.1 HANGUL LETTER E +3155 ; mapped ; 1167 # 1.1 HANGUL LETTER YEO +3156 ; mapped ; 1168 # 1.1 HANGUL LETTER YE +3157 ; mapped ; 1169 # 1.1 HANGUL LETTER O +3158 ; mapped ; 116A # 1.1 HANGUL LETTER WA +3159 ; mapped ; 116B # 1.1 HANGUL LETTER WAE +315A ; mapped ; 116C # 1.1 HANGUL LETTER OE +315B ; mapped ; 116D # 1.1 HANGUL LETTER YO +315C ; mapped ; 116E # 1.1 HANGUL LETTER U +315D ; mapped ; 116F # 1.1 HANGUL LETTER WEO +315E ; mapped ; 1170 # 1.1 HANGUL LETTER WE +315F ; mapped ; 1171 # 1.1 HANGUL LETTER WI +3160 ; mapped ; 1172 # 1.1 HANGUL LETTER YU +3161 ; mapped ; 1173 # 1.1 HANGUL LETTER EU +3162 ; mapped ; 1174 # 1.1 HANGUL LETTER YI +3163 ; mapped ; 1175 # 1.1 HANGUL LETTER I +3164 ; disallowed # 1.1 HANGUL FILLER +3165 ; mapped ; 1114 # 1.1 HANGUL LETTER SSANGNIEUN +3166 ; mapped ; 1115 # 1.1 HANGUL LETTER NIEUN-TIKEUT +3167 ; mapped ; 11C7 # 1.1 HANGUL LETTER NIEUN-SIOS +3168 ; mapped ; 11C8 # 1.1 HANGUL LETTER NIEUN-PANSIOS +3169 ; mapped ; 11CC # 1.1 HANGUL LETTER RIEUL-KIYEOK-SIOS +316A ; mapped ; 11CE # 1.1 HANGUL LETTER RIEUL-TIKEUT +316B ; mapped ; 11D3 # 1.1 HANGUL LETTER RIEUL-PIEUP-SIOS +316C ; mapped ; 11D7 # 1.1 HANGUL LETTER RIEUL-PANSIOS +316D ; mapped ; 11D9 # 1.1 HANGUL LETTER RIEUL-YEORINHIEUH +316E ; mapped ; 111C # 1.1 HANGUL LETTER MIEUM-PIEUP +316F ; mapped ; 11DD # 1.1 HANGUL LETTER MIEUM-SIOS +3170 ; mapped ; 11DF # 1.1 HANGUL LETTER MIEUM-PANSIOS +3171 ; mapped ; 111D # 1.1 HANGUL LETTER KAPYEOUNMIEUM +3172 ; mapped ; 111E # 1.1 HANGUL LETTER PIEUP-KIYEOK +3173 ; mapped ; 1120 # 1.1 HANGUL LETTER PIEUP-TIKEUT +3174 ; mapped ; 1122 # 1.1 HANGUL LETTER PIEUP-SIOS-KIYEOK +3175 ; mapped ; 1123 # 1.1 HANGUL LETTER PIEUP-SIOS-TIKEUT +3176 ; mapped ; 1127 # 1.1 HANGUL LETTER PIEUP-CIEUC +3177 ; mapped ; 
1129 # 1.1 HANGUL LETTER PIEUP-THIEUTH +3178 ; mapped ; 112B # 1.1 HANGUL LETTER KAPYEOUNPIEUP +3179 ; mapped ; 112C # 1.1 HANGUL LETTER KAPYEOUNSSANGPIEUP +317A ; mapped ; 112D # 1.1 HANGUL LETTER SIOS-KIYEOK +317B ; mapped ; 112E # 1.1 HANGUL LETTER SIOS-NIEUN +317C ; mapped ; 112F # 1.1 HANGUL LETTER SIOS-TIKEUT +317D ; mapped ; 1132 # 1.1 HANGUL LETTER SIOS-PIEUP +317E ; mapped ; 1136 # 1.1 HANGUL LETTER SIOS-CIEUC +317F ; mapped ; 1140 # 1.1 HANGUL LETTER PANSIOS +3180 ; mapped ; 1147 # 1.1 HANGUL LETTER SSANGIEUNG +3181 ; mapped ; 114C # 1.1 HANGUL LETTER YESIEUNG +3182 ; mapped ; 11F1 # 1.1 HANGUL LETTER YESIEUNG-SIOS +3183 ; mapped ; 11F2 # 1.1 HANGUL LETTER YESIEUNG-PANSIOS +3184 ; mapped ; 1157 # 1.1 HANGUL LETTER KAPYEOUNPHIEUPH +3185 ; mapped ; 1158 # 1.1 HANGUL LETTER SSANGHIEUH +3186 ; mapped ; 1159 # 1.1 HANGUL LETTER YEORINHIEUH +3187 ; mapped ; 1184 # 1.1 HANGUL LETTER YO-YA +3188 ; mapped ; 1185 # 1.1 HANGUL LETTER YO-YAE +3189 ; mapped ; 1188 # 1.1 HANGUL LETTER YO-I +318A ; mapped ; 1191 # 1.1 HANGUL LETTER YU-YEO +318B ; mapped ; 1192 # 1.1 HANGUL LETTER YU-YE +318C ; mapped ; 1194 # 1.1 HANGUL LETTER YU-I +318D ; mapped ; 119E # 1.1 HANGUL LETTER ARAEA +318E ; mapped ; 11A1 # 1.1 HANGUL LETTER ARAEAE +318F ; disallowed # NA +3190..3191 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK +3192 ; mapped ; 4E00 # 1.1 IDEOGRAPHIC ANNOTATION ONE MARK +3193 ; mapped ; 4E8C # 1.1 IDEOGRAPHIC ANNOTATION TWO MARK +3194 ; mapped ; 4E09 # 1.1 IDEOGRAPHIC ANNOTATION THREE MARK +3195 ; mapped ; 56DB # 1.1 IDEOGRAPHIC ANNOTATION FOUR MARK +3196 ; mapped ; 4E0A # 1.1 IDEOGRAPHIC ANNOTATION TOP MARK +3197 ; mapped ; 4E2D # 1.1 IDEOGRAPHIC ANNOTATION MIDDLE MARK +3198 ; mapped ; 4E0B # 1.1 IDEOGRAPHIC ANNOTATION BOTTOM MARK +3199 ; mapped ; 7532 # 1.1 IDEOGRAPHIC ANNOTATION FIRST MARK +319A ; mapped ; 4E59 # 1.1 IDEOGRAPHIC ANNOTATION SECOND MARK +319B ; mapped ; 4E19 # 1.1 IDEOGRAPHIC ANNOTATION THIRD MARK +319C ; mapped ; 4E01 # 1.1 IDEOGRAPHIC ANNOTATION FOURTH MARK +319D ; mapped ; 5929 # 1.1 IDEOGRAPHIC ANNOTATION HEAVEN MARK +319E ; mapped ; 5730 # 1.1 IDEOGRAPHIC ANNOTATION EARTH MARK +319F ; mapped ; 4EBA # 1.1 IDEOGRAPHIC ANNOTATION MAN MARK +31A0..31B7 ; valid # 3.0 BOPOMOFO LETTER BU..BOPOMOFO FINAL LETTER H +31B8..31BA ; valid # 6.0 BOPOMOFO LETTER GH..BOPOMOFO LETTER ZY +31BB..31BF ; disallowed # NA .. +31C0..31CF ; valid ; ; NV8 # 4.1 CJK STROKE T..CJK STROKE N +31D0..31E3 ; valid ; ; NV8 # 5.1 CJK STROKE H..CJK STROKE Q +31E4..31EF ; disallowed # NA .. 
+31F0..31FF ; valid # 3.2 KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO +3200 ; disallowed_STD3_mapped ; 0028 1100 0029 #1.1 PARENTHESIZED HANGUL KIYEOK +3201 ; disallowed_STD3_mapped ; 0028 1102 0029 #1.1 PARENTHESIZED HANGUL NIEUN +3202 ; disallowed_STD3_mapped ; 0028 1103 0029 #1.1 PARENTHESIZED HANGUL TIKEUT +3203 ; disallowed_STD3_mapped ; 0028 1105 0029 #1.1 PARENTHESIZED HANGUL RIEUL +3204 ; disallowed_STD3_mapped ; 0028 1106 0029 #1.1 PARENTHESIZED HANGUL MIEUM +3205 ; disallowed_STD3_mapped ; 0028 1107 0029 #1.1 PARENTHESIZED HANGUL PIEUP +3206 ; disallowed_STD3_mapped ; 0028 1109 0029 #1.1 PARENTHESIZED HANGUL SIOS +3207 ; disallowed_STD3_mapped ; 0028 110B 0029 #1.1 PARENTHESIZED HANGUL IEUNG +3208 ; disallowed_STD3_mapped ; 0028 110C 0029 #1.1 PARENTHESIZED HANGUL CIEUC +3209 ; disallowed_STD3_mapped ; 0028 110E 0029 #1.1 PARENTHESIZED HANGUL CHIEUCH +320A ; disallowed_STD3_mapped ; 0028 110F 0029 #1.1 PARENTHESIZED HANGUL KHIEUKH +320B ; disallowed_STD3_mapped ; 0028 1110 0029 #1.1 PARENTHESIZED HANGUL THIEUTH +320C ; disallowed_STD3_mapped ; 0028 1111 0029 #1.1 PARENTHESIZED HANGUL PHIEUPH +320D ; disallowed_STD3_mapped ; 0028 1112 0029 #1.1 PARENTHESIZED HANGUL HIEUH +320E ; disallowed_STD3_mapped ; 0028 AC00 0029 #1.1 PARENTHESIZED HANGUL KIYEOK A +320F ; disallowed_STD3_mapped ; 0028 B098 0029 #1.1 PARENTHESIZED HANGUL NIEUN A +3210 ; disallowed_STD3_mapped ; 0028 B2E4 0029 #1.1 PARENTHESIZED HANGUL TIKEUT A +3211 ; disallowed_STD3_mapped ; 0028 B77C 0029 #1.1 PARENTHESIZED HANGUL RIEUL A +3212 ; disallowed_STD3_mapped ; 0028 B9C8 0029 #1.1 PARENTHESIZED HANGUL MIEUM A +3213 ; disallowed_STD3_mapped ; 0028 BC14 0029 #1.1 PARENTHESIZED HANGUL PIEUP A +3214 ; disallowed_STD3_mapped ; 0028 C0AC 0029 #1.1 PARENTHESIZED HANGUL SIOS A +3215 ; disallowed_STD3_mapped ; 0028 C544 0029 #1.1 PARENTHESIZED HANGUL IEUNG A +3216 ; disallowed_STD3_mapped ; 0028 C790 0029 #1.1 PARENTHESIZED HANGUL CIEUC A +3217 ; disallowed_STD3_mapped ; 0028 CC28 0029 #1.1 PARENTHESIZED HANGUL CHIEUCH A +3218 ; disallowed_STD3_mapped ; 0028 CE74 0029 #1.1 PARENTHESIZED HANGUL KHIEUKH A +3219 ; disallowed_STD3_mapped ; 0028 D0C0 0029 #1.1 PARENTHESIZED HANGUL THIEUTH A +321A ; disallowed_STD3_mapped ; 0028 D30C 0029 #1.1 PARENTHESIZED HANGUL PHIEUPH A +321B ; disallowed_STD3_mapped ; 0028 D558 0029 #1.1 PARENTHESIZED HANGUL HIEUH A +321C ; disallowed_STD3_mapped ; 0028 C8FC 0029 #1.1 PARENTHESIZED HANGUL CIEUC U +321D ; disallowed_STD3_mapped ; 0028 C624 C804 0029 #4.0 PARENTHESIZED KOREAN CHARACTER OJEON +321E ; disallowed_STD3_mapped ; 0028 C624 D6C4 0029 #4.0 PARENTHESIZED KOREAN CHARACTER O HU +321F ; disallowed # NA +3220 ; disallowed_STD3_mapped ; 0028 4E00 0029 #1.1 PARENTHESIZED IDEOGRAPH ONE +3221 ; disallowed_STD3_mapped ; 0028 4E8C 0029 #1.1 PARENTHESIZED IDEOGRAPH TWO +3222 ; disallowed_STD3_mapped ; 0028 4E09 0029 #1.1 PARENTHESIZED IDEOGRAPH THREE +3223 ; disallowed_STD3_mapped ; 0028 56DB 0029 #1.1 PARENTHESIZED IDEOGRAPH FOUR +3224 ; disallowed_STD3_mapped ; 0028 4E94 0029 #1.1 PARENTHESIZED IDEOGRAPH FIVE +3225 ; disallowed_STD3_mapped ; 0028 516D 0029 #1.1 PARENTHESIZED IDEOGRAPH SIX +3226 ; disallowed_STD3_mapped ; 0028 4E03 0029 #1.1 PARENTHESIZED IDEOGRAPH SEVEN +3227 ; disallowed_STD3_mapped ; 0028 516B 0029 #1.1 PARENTHESIZED IDEOGRAPH EIGHT +3228 ; disallowed_STD3_mapped ; 0028 4E5D 0029 #1.1 PARENTHESIZED IDEOGRAPH NINE +3229 ; disallowed_STD3_mapped ; 0028 5341 0029 #1.1 PARENTHESIZED IDEOGRAPH TEN +322A ; disallowed_STD3_mapped ; 0028 6708 0029 #1.1 PARENTHESIZED IDEOGRAPH 
MOON +322B ; disallowed_STD3_mapped ; 0028 706B 0029 #1.1 PARENTHESIZED IDEOGRAPH FIRE +322C ; disallowed_STD3_mapped ; 0028 6C34 0029 #1.1 PARENTHESIZED IDEOGRAPH WATER +322D ; disallowed_STD3_mapped ; 0028 6728 0029 #1.1 PARENTHESIZED IDEOGRAPH WOOD +322E ; disallowed_STD3_mapped ; 0028 91D1 0029 #1.1 PARENTHESIZED IDEOGRAPH METAL +322F ; disallowed_STD3_mapped ; 0028 571F 0029 #1.1 PARENTHESIZED IDEOGRAPH EARTH +3230 ; disallowed_STD3_mapped ; 0028 65E5 0029 #1.1 PARENTHESIZED IDEOGRAPH SUN +3231 ; disallowed_STD3_mapped ; 0028 682A 0029 #1.1 PARENTHESIZED IDEOGRAPH STOCK +3232 ; disallowed_STD3_mapped ; 0028 6709 0029 #1.1 PARENTHESIZED IDEOGRAPH HAVE +3233 ; disallowed_STD3_mapped ; 0028 793E 0029 #1.1 PARENTHESIZED IDEOGRAPH SOCIETY +3234 ; disallowed_STD3_mapped ; 0028 540D 0029 #1.1 PARENTHESIZED IDEOGRAPH NAME +3235 ; disallowed_STD3_mapped ; 0028 7279 0029 #1.1 PARENTHESIZED IDEOGRAPH SPECIAL +3236 ; disallowed_STD3_mapped ; 0028 8CA1 0029 #1.1 PARENTHESIZED IDEOGRAPH FINANCIAL +3237 ; disallowed_STD3_mapped ; 0028 795D 0029 #1.1 PARENTHESIZED IDEOGRAPH CONGRATULATION +3238 ; disallowed_STD3_mapped ; 0028 52B4 0029 #1.1 PARENTHESIZED IDEOGRAPH LABOR +3239 ; disallowed_STD3_mapped ; 0028 4EE3 0029 #1.1 PARENTHESIZED IDEOGRAPH REPRESENT +323A ; disallowed_STD3_mapped ; 0028 547C 0029 #1.1 PARENTHESIZED IDEOGRAPH CALL +323B ; disallowed_STD3_mapped ; 0028 5B66 0029 #1.1 PARENTHESIZED IDEOGRAPH STUDY +323C ; disallowed_STD3_mapped ; 0028 76E3 0029 #1.1 PARENTHESIZED IDEOGRAPH SUPERVISE +323D ; disallowed_STD3_mapped ; 0028 4F01 0029 #1.1 PARENTHESIZED IDEOGRAPH ENTERPRISE +323E ; disallowed_STD3_mapped ; 0028 8CC7 0029 #1.1 PARENTHESIZED IDEOGRAPH RESOURCE +323F ; disallowed_STD3_mapped ; 0028 5354 0029 #1.1 PARENTHESIZED IDEOGRAPH ALLIANCE +3240 ; disallowed_STD3_mapped ; 0028 796D 0029 #1.1 PARENTHESIZED IDEOGRAPH FESTIVAL +3241 ; disallowed_STD3_mapped ; 0028 4F11 0029 #1.1 PARENTHESIZED IDEOGRAPH REST +3242 ; disallowed_STD3_mapped ; 0028 81EA 0029 #1.1 PARENTHESIZED IDEOGRAPH SELF +3243 ; disallowed_STD3_mapped ; 0028 81F3 0029 #1.1 PARENTHESIZED IDEOGRAPH REACH +3244 ; mapped ; 554F # 5.2 CIRCLED IDEOGRAPH QUESTION +3245 ; mapped ; 5E7C # 5.2 CIRCLED IDEOGRAPH KINDERGARTEN +3246 ; mapped ; 6587 # 5.2 CIRCLED IDEOGRAPH SCHOOL +3247 ; mapped ; 7B8F # 5.2 CIRCLED IDEOGRAPH KOTO +3248..324F ; valid ; ; NV8 # 5.2 CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE +3250 ; mapped ; 0070 0074 0065 #4.0 PARTNERSHIP SIGN +3251 ; mapped ; 0032 0031 # 3.2 CIRCLED NUMBER TWENTY ONE +3252 ; mapped ; 0032 0032 # 3.2 CIRCLED NUMBER TWENTY TWO +3253 ; mapped ; 0032 0033 # 3.2 CIRCLED NUMBER TWENTY THREE +3254 ; mapped ; 0032 0034 # 3.2 CIRCLED NUMBER TWENTY FOUR +3255 ; mapped ; 0032 0035 # 3.2 CIRCLED NUMBER TWENTY FIVE +3256 ; mapped ; 0032 0036 # 3.2 CIRCLED NUMBER TWENTY SIX +3257 ; mapped ; 0032 0037 # 3.2 CIRCLED NUMBER TWENTY SEVEN +3258 ; mapped ; 0032 0038 # 3.2 CIRCLED NUMBER TWENTY EIGHT +3259 ; mapped ; 0032 0039 # 3.2 CIRCLED NUMBER TWENTY NINE +325A ; mapped ; 0033 0030 # 3.2 CIRCLED NUMBER THIRTY +325B ; mapped ; 0033 0031 # 3.2 CIRCLED NUMBER THIRTY ONE +325C ; mapped ; 0033 0032 # 3.2 CIRCLED NUMBER THIRTY TWO +325D ; mapped ; 0033 0033 # 3.2 CIRCLED NUMBER THIRTY THREE +325E ; mapped ; 0033 0034 # 3.2 CIRCLED NUMBER THIRTY FOUR +325F ; mapped ; 0033 0035 # 3.2 CIRCLED NUMBER THIRTY FIVE +3260 ; mapped ; 1100 # 1.1 CIRCLED HANGUL KIYEOK +3261 ; mapped ; 1102 # 1.1 CIRCLED HANGUL NIEUN +3262 ; mapped ; 1103 # 1.1 CIRCLED HANGUL TIKEUT +3263 ; mapped ; 1105 
# 1.1 CIRCLED HANGUL RIEUL +3264 ; mapped ; 1106 # 1.1 CIRCLED HANGUL MIEUM +3265 ; mapped ; 1107 # 1.1 CIRCLED HANGUL PIEUP +3266 ; mapped ; 1109 # 1.1 CIRCLED HANGUL SIOS +3267 ; mapped ; 110B # 1.1 CIRCLED HANGUL IEUNG +3268 ; mapped ; 110C # 1.1 CIRCLED HANGUL CIEUC +3269 ; mapped ; 110E # 1.1 CIRCLED HANGUL CHIEUCH +326A ; mapped ; 110F # 1.1 CIRCLED HANGUL KHIEUKH +326B ; mapped ; 1110 # 1.1 CIRCLED HANGUL THIEUTH +326C ; mapped ; 1111 # 1.1 CIRCLED HANGUL PHIEUPH +326D ; mapped ; 1112 # 1.1 CIRCLED HANGUL HIEUH +326E ; mapped ; AC00 # 1.1 CIRCLED HANGUL KIYEOK A +326F ; mapped ; B098 # 1.1 CIRCLED HANGUL NIEUN A +3270 ; mapped ; B2E4 # 1.1 CIRCLED HANGUL TIKEUT A +3271 ; mapped ; B77C # 1.1 CIRCLED HANGUL RIEUL A +3272 ; mapped ; B9C8 # 1.1 CIRCLED HANGUL MIEUM A +3273 ; mapped ; BC14 # 1.1 CIRCLED HANGUL PIEUP A +3274 ; mapped ; C0AC # 1.1 CIRCLED HANGUL SIOS A +3275 ; mapped ; C544 # 1.1 CIRCLED HANGUL IEUNG A +3276 ; mapped ; C790 # 1.1 CIRCLED HANGUL CIEUC A +3277 ; mapped ; CC28 # 1.1 CIRCLED HANGUL CHIEUCH A +3278 ; mapped ; CE74 # 1.1 CIRCLED HANGUL KHIEUKH A +3279 ; mapped ; D0C0 # 1.1 CIRCLED HANGUL THIEUTH A +327A ; mapped ; D30C # 1.1 CIRCLED HANGUL PHIEUPH A +327B ; mapped ; D558 # 1.1 CIRCLED HANGUL HIEUH A +327C ; mapped ; CC38 ACE0 # 4.0 CIRCLED KOREAN CHARACTER CHAMKO +327D ; mapped ; C8FC C758 # 4.0 CIRCLED KOREAN CHARACTER JUEUI +327E ; mapped ; C6B0 # 4.1 CIRCLED HANGUL IEUNG U +327F ; valid ; ; NV8 # 1.1 KOREAN STANDARD SYMBOL +3280 ; mapped ; 4E00 # 1.1 CIRCLED IDEOGRAPH ONE +3281 ; mapped ; 4E8C # 1.1 CIRCLED IDEOGRAPH TWO +3282 ; mapped ; 4E09 # 1.1 CIRCLED IDEOGRAPH THREE +3283 ; mapped ; 56DB # 1.1 CIRCLED IDEOGRAPH FOUR +3284 ; mapped ; 4E94 # 1.1 CIRCLED IDEOGRAPH FIVE +3285 ; mapped ; 516D # 1.1 CIRCLED IDEOGRAPH SIX +3286 ; mapped ; 4E03 # 1.1 CIRCLED IDEOGRAPH SEVEN +3287 ; mapped ; 516B # 1.1 CIRCLED IDEOGRAPH EIGHT +3288 ; mapped ; 4E5D # 1.1 CIRCLED IDEOGRAPH NINE +3289 ; mapped ; 5341 # 1.1 CIRCLED IDEOGRAPH TEN +328A ; mapped ; 6708 # 1.1 CIRCLED IDEOGRAPH MOON +328B ; mapped ; 706B # 1.1 CIRCLED IDEOGRAPH FIRE +328C ; mapped ; 6C34 # 1.1 CIRCLED IDEOGRAPH WATER +328D ; mapped ; 6728 # 1.1 CIRCLED IDEOGRAPH WOOD +328E ; mapped ; 91D1 # 1.1 CIRCLED IDEOGRAPH METAL +328F ; mapped ; 571F # 1.1 CIRCLED IDEOGRAPH EARTH +3290 ; mapped ; 65E5 # 1.1 CIRCLED IDEOGRAPH SUN +3291 ; mapped ; 682A # 1.1 CIRCLED IDEOGRAPH STOCK +3292 ; mapped ; 6709 # 1.1 CIRCLED IDEOGRAPH HAVE +3293 ; mapped ; 793E # 1.1 CIRCLED IDEOGRAPH SOCIETY +3294 ; mapped ; 540D # 1.1 CIRCLED IDEOGRAPH NAME +3295 ; mapped ; 7279 # 1.1 CIRCLED IDEOGRAPH SPECIAL +3296 ; mapped ; 8CA1 # 1.1 CIRCLED IDEOGRAPH FINANCIAL +3297 ; mapped ; 795D # 1.1 CIRCLED IDEOGRAPH CONGRATULATION +3298 ; mapped ; 52B4 # 1.1 CIRCLED IDEOGRAPH LABOR +3299 ; mapped ; 79D8 # 1.1 CIRCLED IDEOGRAPH SECRET +329A ; mapped ; 7537 # 1.1 CIRCLED IDEOGRAPH MALE +329B ; mapped ; 5973 # 1.1 CIRCLED IDEOGRAPH FEMALE +329C ; mapped ; 9069 # 1.1 CIRCLED IDEOGRAPH SUITABLE +329D ; mapped ; 512A # 1.1 CIRCLED IDEOGRAPH EXCELLENT +329E ; mapped ; 5370 # 1.1 CIRCLED IDEOGRAPH PRINT +329F ; mapped ; 6CE8 # 1.1 CIRCLED IDEOGRAPH ATTENTION +32A0 ; mapped ; 9805 # 1.1 CIRCLED IDEOGRAPH ITEM +32A1 ; mapped ; 4F11 # 1.1 CIRCLED IDEOGRAPH REST +32A2 ; mapped ; 5199 # 1.1 CIRCLED IDEOGRAPH COPY +32A3 ; mapped ; 6B63 # 1.1 CIRCLED IDEOGRAPH CORRECT +32A4 ; mapped ; 4E0A # 1.1 CIRCLED IDEOGRAPH HIGH +32A5 ; mapped ; 4E2D # 1.1 CIRCLED IDEOGRAPH CENTRE +32A6 ; mapped ; 4E0B # 1.1 CIRCLED IDEOGRAPH LOW +32A7 ; mapped ; 5DE6 # 1.1 CIRCLED 
IDEOGRAPH LEFT +32A8 ; mapped ; 53F3 # 1.1 CIRCLED IDEOGRAPH RIGHT +32A9 ; mapped ; 533B # 1.1 CIRCLED IDEOGRAPH MEDICINE +32AA ; mapped ; 5B97 # 1.1 CIRCLED IDEOGRAPH RELIGION +32AB ; mapped ; 5B66 # 1.1 CIRCLED IDEOGRAPH STUDY +32AC ; mapped ; 76E3 # 1.1 CIRCLED IDEOGRAPH SUPERVISE +32AD ; mapped ; 4F01 # 1.1 CIRCLED IDEOGRAPH ENTERPRISE +32AE ; mapped ; 8CC7 # 1.1 CIRCLED IDEOGRAPH RESOURCE +32AF ; mapped ; 5354 # 1.1 CIRCLED IDEOGRAPH ALLIANCE +32B0 ; mapped ; 591C # 1.1 CIRCLED IDEOGRAPH NIGHT +32B1 ; mapped ; 0033 0036 # 3.2 CIRCLED NUMBER THIRTY SIX +32B2 ; mapped ; 0033 0037 # 3.2 CIRCLED NUMBER THIRTY SEVEN +32B3 ; mapped ; 0033 0038 # 3.2 CIRCLED NUMBER THIRTY EIGHT +32B4 ; mapped ; 0033 0039 # 3.2 CIRCLED NUMBER THIRTY NINE +32B5 ; mapped ; 0034 0030 # 3.2 CIRCLED NUMBER FORTY +32B6 ; mapped ; 0034 0031 # 3.2 CIRCLED NUMBER FORTY ONE +32B7 ; mapped ; 0034 0032 # 3.2 CIRCLED NUMBER FORTY TWO +32B8 ; mapped ; 0034 0033 # 3.2 CIRCLED NUMBER FORTY THREE +32B9 ; mapped ; 0034 0034 # 3.2 CIRCLED NUMBER FORTY FOUR +32BA ; mapped ; 0034 0035 # 3.2 CIRCLED NUMBER FORTY FIVE +32BB ; mapped ; 0034 0036 # 3.2 CIRCLED NUMBER FORTY SIX +32BC ; mapped ; 0034 0037 # 3.2 CIRCLED NUMBER FORTY SEVEN +32BD ; mapped ; 0034 0038 # 3.2 CIRCLED NUMBER FORTY EIGHT +32BE ; mapped ; 0034 0039 # 3.2 CIRCLED NUMBER FORTY NINE +32BF ; mapped ; 0035 0030 # 3.2 CIRCLED NUMBER FIFTY +32C0 ; mapped ; 0031 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY +32C1 ; mapped ; 0032 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR FEBRUARY +32C2 ; mapped ; 0033 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR MARCH +32C3 ; mapped ; 0034 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR APRIL +32C4 ; mapped ; 0035 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR MAY +32C5 ; mapped ; 0036 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JUNE +32C6 ; mapped ; 0037 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JULY +32C7 ; mapped ; 0038 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR AUGUST +32C8 ; mapped ; 0039 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR SEPTEMBER +32C9 ; mapped ; 0031 0030 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR OCTOBER +32CA ; mapped ; 0031 0031 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR NOVEMBER +32CB ; mapped ; 0031 0032 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DECEMBER +32CC ; mapped ; 0068 0067 # 4.0 SQUARE HG +32CD ; mapped ; 0065 0072 0067 #4.0 SQUARE ERG +32CE ; mapped ; 0065 0076 # 4.0 SQUARE EV +32CF ; mapped ; 006C 0074 0064 #4.0 LIMITED LIABILITY SIGN +32D0 ; mapped ; 30A2 # 1.1 CIRCLED KATAKANA A +32D1 ; mapped ; 30A4 # 1.1 CIRCLED KATAKANA I +32D2 ; mapped ; 30A6 # 1.1 CIRCLED KATAKANA U +32D3 ; mapped ; 30A8 # 1.1 CIRCLED KATAKANA E +32D4 ; mapped ; 30AA # 1.1 CIRCLED KATAKANA O +32D5 ; mapped ; 30AB # 1.1 CIRCLED KATAKANA KA +32D6 ; mapped ; 30AD # 1.1 CIRCLED KATAKANA KI +32D7 ; mapped ; 30AF # 1.1 CIRCLED KATAKANA KU +32D8 ; mapped ; 30B1 # 1.1 CIRCLED KATAKANA KE +32D9 ; mapped ; 30B3 # 1.1 CIRCLED KATAKANA KO +32DA ; mapped ; 30B5 # 1.1 CIRCLED KATAKANA SA +32DB ; mapped ; 30B7 # 1.1 CIRCLED KATAKANA SI +32DC ; mapped ; 30B9 # 1.1 CIRCLED KATAKANA SU +32DD ; mapped ; 30BB # 1.1 CIRCLED KATAKANA SE +32DE ; mapped ; 30BD # 1.1 CIRCLED KATAKANA SO +32DF ; mapped ; 30BF # 1.1 CIRCLED KATAKANA TA +32E0 ; mapped ; 30C1 # 1.1 CIRCLED KATAKANA TI +32E1 ; mapped ; 30C4 # 1.1 CIRCLED KATAKANA TU +32E2 ; mapped ; 30C6 # 1.1 CIRCLED KATAKANA TE +32E3 ; mapped ; 30C8 # 1.1 CIRCLED KATAKANA TO +32E4 ; mapped ; 30CA # 1.1 CIRCLED KATAKANA NA +32E5 ; mapped ; 30CB # 1.1 CIRCLED KATAKANA NI +32E6 ; mapped ; 30CC # 1.1 CIRCLED 
KATAKANA NU +32E7 ; mapped ; 30CD # 1.1 CIRCLED KATAKANA NE +32E8 ; mapped ; 30CE # 1.1 CIRCLED KATAKANA NO +32E9 ; mapped ; 30CF # 1.1 CIRCLED KATAKANA HA +32EA ; mapped ; 30D2 # 1.1 CIRCLED KATAKANA HI +32EB ; mapped ; 30D5 # 1.1 CIRCLED KATAKANA HU +32EC ; mapped ; 30D8 # 1.1 CIRCLED KATAKANA HE +32ED ; mapped ; 30DB # 1.1 CIRCLED KATAKANA HO +32EE ; mapped ; 30DE # 1.1 CIRCLED KATAKANA MA +32EF ; mapped ; 30DF # 1.1 CIRCLED KATAKANA MI +32F0 ; mapped ; 30E0 # 1.1 CIRCLED KATAKANA MU +32F1 ; mapped ; 30E1 # 1.1 CIRCLED KATAKANA ME +32F2 ; mapped ; 30E2 # 1.1 CIRCLED KATAKANA MO +32F3 ; mapped ; 30E4 # 1.1 CIRCLED KATAKANA YA +32F4 ; mapped ; 30E6 # 1.1 CIRCLED KATAKANA YU +32F5 ; mapped ; 30E8 # 1.1 CIRCLED KATAKANA YO +32F6 ; mapped ; 30E9 # 1.1 CIRCLED KATAKANA RA +32F7 ; mapped ; 30EA # 1.1 CIRCLED KATAKANA RI +32F8 ; mapped ; 30EB # 1.1 CIRCLED KATAKANA RU +32F9 ; mapped ; 30EC # 1.1 CIRCLED KATAKANA RE +32FA ; mapped ; 30ED # 1.1 CIRCLED KATAKANA RO +32FB ; mapped ; 30EF # 1.1 CIRCLED KATAKANA WA +32FC ; mapped ; 30F0 # 1.1 CIRCLED KATAKANA WI +32FD ; mapped ; 30F1 # 1.1 CIRCLED KATAKANA WE +32FE ; mapped ; 30F2 # 1.1 CIRCLED KATAKANA WO +32FF ; disallowed # NA +3300 ; mapped ; 30A2 30D1 30FC 30C8 #1.1 SQUARE APAATO +3301 ; mapped ; 30A2 30EB 30D5 30A1 #1.1 SQUARE ARUHUA +3302 ; mapped ; 30A2 30F3 30DA 30A2 #1.1 SQUARE ANPEA +3303 ; mapped ; 30A2 30FC 30EB #1.1 SQUARE AARU +3304 ; mapped ; 30A4 30CB 30F3 30B0 #1.1 SQUARE ININGU +3305 ; mapped ; 30A4 30F3 30C1 #1.1 SQUARE INTI +3306 ; mapped ; 30A6 30A9 30F3 #1.1 SQUARE UON +3307 ; mapped ; 30A8 30B9 30AF 30FC 30C9 #1.1 SQUARE ESUKUUDO +3308 ; mapped ; 30A8 30FC 30AB 30FC #1.1 SQUARE EEKAA +3309 ; mapped ; 30AA 30F3 30B9 #1.1 SQUARE ONSU +330A ; mapped ; 30AA 30FC 30E0 #1.1 SQUARE OOMU +330B ; mapped ; 30AB 30A4 30EA #1.1 SQUARE KAIRI +330C ; mapped ; 30AB 30E9 30C3 30C8 #1.1 SQUARE KARATTO +330D ; mapped ; 30AB 30ED 30EA 30FC #1.1 SQUARE KARORII +330E ; mapped ; 30AC 30ED 30F3 #1.1 SQUARE GARON +330F ; mapped ; 30AC 30F3 30DE #1.1 SQUARE GANMA +3310 ; mapped ; 30AE 30AC # 1.1 SQUARE GIGA +3311 ; mapped ; 30AE 30CB 30FC #1.1 SQUARE GINII +3312 ; mapped ; 30AD 30E5 30EA 30FC #1.1 SQUARE KYURII +3313 ; mapped ; 30AE 30EB 30C0 30FC #1.1 SQUARE GIRUDAA +3314 ; mapped ; 30AD 30ED # 1.1 SQUARE KIRO +3315 ; mapped ; 30AD 30ED 30B0 30E9 30E0 #1.1 SQUARE KIROGURAMU +3316 ; mapped ; 30AD 30ED 30E1 30FC 30C8 30EB #1.1 SQUARE KIROMEETORU +3317 ; mapped ; 30AD 30ED 30EF 30C3 30C8 #1.1 SQUARE KIROWATTO +3318 ; mapped ; 30B0 30E9 30E0 #1.1 SQUARE GURAMU +3319 ; mapped ; 30B0 30E9 30E0 30C8 30F3 #1.1 SQUARE GURAMUTON +331A ; mapped ; 30AF 30EB 30BC 30A4 30ED #1.1 SQUARE KURUZEIRO +331B ; mapped ; 30AF 30ED 30FC 30CD #1.1 SQUARE KUROONE +331C ; mapped ; 30B1 30FC 30B9 #1.1 SQUARE KEESU +331D ; mapped ; 30B3 30EB 30CA #1.1 SQUARE KORUNA +331E ; mapped ; 30B3 30FC 30DD #1.1 SQUARE KOOPO +331F ; mapped ; 30B5 30A4 30AF 30EB #1.1 SQUARE SAIKURU +3320 ; mapped ; 30B5 30F3 30C1 30FC 30E0 #1.1 SQUARE SANTIIMU +3321 ; mapped ; 30B7 30EA 30F3 30B0 #1.1 SQUARE SIRINGU +3322 ; mapped ; 30BB 30F3 30C1 #1.1 SQUARE SENTI +3323 ; mapped ; 30BB 30F3 30C8 #1.1 SQUARE SENTO +3324 ; mapped ; 30C0 30FC 30B9 #1.1 SQUARE DAASU +3325 ; mapped ; 30C7 30B7 # 1.1 SQUARE DESI +3326 ; mapped ; 30C9 30EB # 1.1 SQUARE DORU +3327 ; mapped ; 30C8 30F3 # 1.1 SQUARE TON +3328 ; mapped ; 30CA 30CE # 1.1 SQUARE NANO +3329 ; mapped ; 30CE 30C3 30C8 #1.1 SQUARE NOTTO +332A ; mapped ; 30CF 30A4 30C4 #1.1 SQUARE HAITU +332B ; mapped ; 30D1 30FC 30BB 30F3 30C8 #1.1 SQUARE PAASENTO +332C ; 
mapped ; 30D1 30FC 30C4 #1.1 SQUARE PAATU +332D ; mapped ; 30D0 30FC 30EC 30EB #1.1 SQUARE BAARERU +332E ; mapped ; 30D4 30A2 30B9 30C8 30EB #1.1 SQUARE PIASUTORU +332F ; mapped ; 30D4 30AF 30EB #1.1 SQUARE PIKURU +3330 ; mapped ; 30D4 30B3 # 1.1 SQUARE PIKO +3331 ; mapped ; 30D3 30EB # 1.1 SQUARE BIRU +3332 ; mapped ; 30D5 30A1 30E9 30C3 30C9 #1.1 SQUARE HUARADDO +3333 ; mapped ; 30D5 30A3 30FC 30C8 #1.1 SQUARE HUIITO +3334 ; mapped ; 30D6 30C3 30B7 30A7 30EB #1.1 SQUARE BUSSYERU +3335 ; mapped ; 30D5 30E9 30F3 #1.1 SQUARE HURAN +3336 ; mapped ; 30D8 30AF 30BF 30FC 30EB #1.1 SQUARE HEKUTAARU +3337 ; mapped ; 30DA 30BD # 1.1 SQUARE PESO +3338 ; mapped ; 30DA 30CB 30D2 #1.1 SQUARE PENIHI +3339 ; mapped ; 30D8 30EB 30C4 #1.1 SQUARE HERUTU +333A ; mapped ; 30DA 30F3 30B9 #1.1 SQUARE PENSU +333B ; mapped ; 30DA 30FC 30B8 #1.1 SQUARE PEEZI +333C ; mapped ; 30D9 30FC 30BF #1.1 SQUARE BEETA +333D ; mapped ; 30DD 30A4 30F3 30C8 #1.1 SQUARE POINTO +333E ; mapped ; 30DC 30EB 30C8 #1.1 SQUARE BORUTO +333F ; mapped ; 30DB 30F3 # 1.1 SQUARE HON +3340 ; mapped ; 30DD 30F3 30C9 #1.1 SQUARE PONDO +3341 ; mapped ; 30DB 30FC 30EB #1.1 SQUARE HOORU +3342 ; mapped ; 30DB 30FC 30F3 #1.1 SQUARE HOON +3343 ; mapped ; 30DE 30A4 30AF 30ED #1.1 SQUARE MAIKURO +3344 ; mapped ; 30DE 30A4 30EB #1.1 SQUARE MAIRU +3345 ; mapped ; 30DE 30C3 30CF #1.1 SQUARE MAHHA +3346 ; mapped ; 30DE 30EB 30AF #1.1 SQUARE MARUKU +3347 ; mapped ; 30DE 30F3 30B7 30E7 30F3 #1.1 SQUARE MANSYON +3348 ; mapped ; 30DF 30AF 30ED 30F3 #1.1 SQUARE MIKURON +3349 ; mapped ; 30DF 30EA # 1.1 SQUARE MIRI +334A ; mapped ; 30DF 30EA 30D0 30FC 30EB #1.1 SQUARE MIRIBAARU +334B ; mapped ; 30E1 30AC # 1.1 SQUARE MEGA +334C ; mapped ; 30E1 30AC 30C8 30F3 #1.1 SQUARE MEGATON +334D ; mapped ; 30E1 30FC 30C8 30EB #1.1 SQUARE MEETORU +334E ; mapped ; 30E4 30FC 30C9 #1.1 SQUARE YAADO +334F ; mapped ; 30E4 30FC 30EB #1.1 SQUARE YAARU +3350 ; mapped ; 30E6 30A2 30F3 #1.1 SQUARE YUAN +3351 ; mapped ; 30EA 30C3 30C8 30EB #1.1 SQUARE RITTORU +3352 ; mapped ; 30EA 30E9 # 1.1 SQUARE RIRA +3353 ; mapped ; 30EB 30D4 30FC #1.1 SQUARE RUPII +3354 ; mapped ; 30EB 30FC 30D6 30EB #1.1 SQUARE RUUBURU +3355 ; mapped ; 30EC 30E0 # 1.1 SQUARE REMU +3356 ; mapped ; 30EC 30F3 30C8 30B2 30F3 #1.1 SQUARE RENTOGEN +3357 ; mapped ; 30EF 30C3 30C8 #1.1 SQUARE WATTO +3358 ; mapped ; 0030 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ZERO +3359 ; mapped ; 0031 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ONE +335A ; mapped ; 0032 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWO +335B ; mapped ; 0033 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR THREE +335C ; mapped ; 0034 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FOUR +335D ; mapped ; 0035 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FIVE +335E ; mapped ; 0036 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SIX +335F ; mapped ; 0037 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SEVEN +3360 ; mapped ; 0038 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR EIGHT +3361 ; mapped ; 0039 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR NINE +3362 ; mapped ; 0031 0030 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TEN +3363 ; mapped ; 0031 0031 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ELEVEN +3364 ; mapped ; 0031 0032 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWELVE +3365 ; mapped ; 0031 0033 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR THIRTEEN +3366 ; mapped ; 0031 0034 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FOURTEEN +3367 ; mapped ; 0031 0035 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR 
FIFTEEN +3368 ; mapped ; 0031 0036 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SIXTEEN +3369 ; mapped ; 0031 0037 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SEVENTEEN +336A ; mapped ; 0031 0038 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR EIGHTEEN +336B ; mapped ; 0031 0039 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR NINETEEN +336C ; mapped ; 0032 0030 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY +336D ; mapped ; 0032 0031 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-ONE +336E ; mapped ; 0032 0032 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-TWO +336F ; mapped ; 0032 0033 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-THREE +3370 ; mapped ; 0032 0034 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-FOUR +3371 ; mapped ; 0068 0070 0061 #1.1 SQUARE HPA +3372 ; mapped ; 0064 0061 # 1.1 SQUARE DA +3373 ; mapped ; 0061 0075 # 1.1 SQUARE AU +3374 ; mapped ; 0062 0061 0072 #1.1 SQUARE BAR +3375 ; mapped ; 006F 0076 # 1.1 SQUARE OV +3376 ; mapped ; 0070 0063 # 1.1 SQUARE PC +3377 ; mapped ; 0064 006D # 4.0 SQUARE DM +3378 ; mapped ; 0064 006D 0032 #4.0 SQUARE DM SQUARED +3379 ; mapped ; 0064 006D 0033 #4.0 SQUARE DM CUBED +337A ; mapped ; 0069 0075 # 4.0 SQUARE IU +337B ; mapped ; 5E73 6210 # 1.1 SQUARE ERA NAME HEISEI +337C ; mapped ; 662D 548C # 1.1 SQUARE ERA NAME SYOUWA +337D ; mapped ; 5927 6B63 # 1.1 SQUARE ERA NAME TAISYOU +337E ; mapped ; 660E 6CBB # 1.1 SQUARE ERA NAME MEIZI +337F ; mapped ; 682A 5F0F 4F1A 793E #1.1 SQUARE CORPORATION +3380 ; mapped ; 0070 0061 # 1.1 SQUARE PA AMPS +3381 ; mapped ; 006E 0061 # 1.1 SQUARE NA +3382 ; mapped ; 03BC 0061 # 1.1 SQUARE MU A +3383 ; mapped ; 006D 0061 # 1.1 SQUARE MA +3384 ; mapped ; 006B 0061 # 1.1 SQUARE KA +3385 ; mapped ; 006B 0062 # 1.1 SQUARE KB +3386 ; mapped ; 006D 0062 # 1.1 SQUARE MB +3387 ; mapped ; 0067 0062 # 1.1 SQUARE GB +3388 ; mapped ; 0063 0061 006C #1.1 SQUARE CAL +3389 ; mapped ; 006B 0063 0061 006C #1.1 SQUARE KCAL +338A ; mapped ; 0070 0066 # 1.1 SQUARE PF +338B ; mapped ; 006E 0066 # 1.1 SQUARE NF +338C ; mapped ; 03BC 0066 # 1.1 SQUARE MU F +338D ; mapped ; 03BC 0067 # 1.1 SQUARE MU G +338E ; mapped ; 006D 0067 # 1.1 SQUARE MG +338F ; mapped ; 006B 0067 # 1.1 SQUARE KG +3390 ; mapped ; 0068 007A # 1.1 SQUARE HZ +3391 ; mapped ; 006B 0068 007A #1.1 SQUARE KHZ +3392 ; mapped ; 006D 0068 007A #1.1 SQUARE MHZ +3393 ; mapped ; 0067 0068 007A #1.1 SQUARE GHZ +3394 ; mapped ; 0074 0068 007A #1.1 SQUARE THZ +3395 ; mapped ; 03BC 006C # 1.1 SQUARE MU L +3396 ; mapped ; 006D 006C # 1.1 SQUARE ML +3397 ; mapped ; 0064 006C # 1.1 SQUARE DL +3398 ; mapped ; 006B 006C # 1.1 SQUARE KL +3399 ; mapped ; 0066 006D # 1.1 SQUARE FM +339A ; mapped ; 006E 006D # 1.1 SQUARE NM +339B ; mapped ; 03BC 006D # 1.1 SQUARE MU M +339C ; mapped ; 006D 006D # 1.1 SQUARE MM +339D ; mapped ; 0063 006D # 1.1 SQUARE CM +339E ; mapped ; 006B 006D # 1.1 SQUARE KM +339F ; mapped ; 006D 006D 0032 #1.1 SQUARE MM SQUARED +33A0 ; mapped ; 0063 006D 0032 #1.1 SQUARE CM SQUARED +33A1 ; mapped ; 006D 0032 # 1.1 SQUARE M SQUARED +33A2 ; mapped ; 006B 006D 0032 #1.1 SQUARE KM SQUARED +33A3 ; mapped ; 006D 006D 0033 #1.1 SQUARE MM CUBED +33A4 ; mapped ; 0063 006D 0033 #1.1 SQUARE CM CUBED +33A5 ; mapped ; 006D 0033 # 1.1 SQUARE M CUBED +33A6 ; mapped ; 006B 006D 0033 #1.1 SQUARE KM CUBED +33A7 ; mapped ; 006D 2215 0073 #1.1 SQUARE M OVER S +33A8 ; mapped ; 006D 2215 0073 0032 #1.1 SQUARE M OVER S SQUARED +33A9 ; mapped ; 0070 0061 # 1.1 SQUARE PA +33AA ; mapped ; 006B 0070 0061 #1.1 SQUARE KPA +33AB ; mapped ; 
006D 0070 0061 #1.1 SQUARE MPA +33AC ; mapped ; 0067 0070 0061 #1.1 SQUARE GPA +33AD ; mapped ; 0072 0061 0064 #1.1 SQUARE RAD +33AE ; mapped ; 0072 0061 0064 2215 0073 #1.1 SQUARE RAD OVER S +33AF ; mapped ; 0072 0061 0064 2215 0073 0032 #1.1 SQUARE RAD OVER S SQUARED +33B0 ; mapped ; 0070 0073 # 1.1 SQUARE PS +33B1 ; mapped ; 006E 0073 # 1.1 SQUARE NS +33B2 ; mapped ; 03BC 0073 # 1.1 SQUARE MU S +33B3 ; mapped ; 006D 0073 # 1.1 SQUARE MS +33B4 ; mapped ; 0070 0076 # 1.1 SQUARE PV +33B5 ; mapped ; 006E 0076 # 1.1 SQUARE NV +33B6 ; mapped ; 03BC 0076 # 1.1 SQUARE MU V +33B7 ; mapped ; 006D 0076 # 1.1 SQUARE MV +33B8 ; mapped ; 006B 0076 # 1.1 SQUARE KV +33B9 ; mapped ; 006D 0076 # 1.1 SQUARE MV MEGA +33BA ; mapped ; 0070 0077 # 1.1 SQUARE PW +33BB ; mapped ; 006E 0077 # 1.1 SQUARE NW +33BC ; mapped ; 03BC 0077 # 1.1 SQUARE MU W +33BD ; mapped ; 006D 0077 # 1.1 SQUARE MW +33BE ; mapped ; 006B 0077 # 1.1 SQUARE KW +33BF ; mapped ; 006D 0077 # 1.1 SQUARE MW MEGA +33C0 ; mapped ; 006B 03C9 # 1.1 SQUARE K OHM +33C1 ; mapped ; 006D 03C9 # 1.1 SQUARE M OHM +33C2 ; disallowed # 1.1 SQUARE AM +33C3 ; mapped ; 0062 0071 # 1.1 SQUARE BQ +33C4 ; mapped ; 0063 0063 # 1.1 SQUARE CC +33C5 ; mapped ; 0063 0064 # 1.1 SQUARE CD +33C6 ; mapped ; 0063 2215 006B 0067 #1.1 SQUARE C OVER KG +33C7 ; disallowed # 1.1 SQUARE CO +33C8 ; mapped ; 0064 0062 # 1.1 SQUARE DB +33C9 ; mapped ; 0067 0079 # 1.1 SQUARE GY +33CA ; mapped ; 0068 0061 # 1.1 SQUARE HA +33CB ; mapped ; 0068 0070 # 1.1 SQUARE HP +33CC ; mapped ; 0069 006E # 1.1 SQUARE IN +33CD ; mapped ; 006B 006B # 1.1 SQUARE KK +33CE ; mapped ; 006B 006D # 1.1 SQUARE KM CAPITAL +33CF ; mapped ; 006B 0074 # 1.1 SQUARE KT +33D0 ; mapped ; 006C 006D # 1.1 SQUARE LM +33D1 ; mapped ; 006C 006E # 1.1 SQUARE LN +33D2 ; mapped ; 006C 006F 0067 #1.1 SQUARE LOG +33D3 ; mapped ; 006C 0078 # 1.1 SQUARE LX +33D4 ; mapped ; 006D 0062 # 1.1 SQUARE MB SMALL +33D5 ; mapped ; 006D 0069 006C #1.1 SQUARE MIL +33D6 ; mapped ; 006D 006F 006C #1.1 SQUARE MOL +33D7 ; mapped ; 0070 0068 # 1.1 SQUARE PH +33D8 ; disallowed # 1.1 SQUARE PM +33D9 ; mapped ; 0070 0070 006D #1.1 SQUARE PPM +33DA ; mapped ; 0070 0072 # 1.1 SQUARE PR +33DB ; mapped ; 0073 0072 # 1.1 SQUARE SR +33DC ; mapped ; 0073 0076 # 1.1 SQUARE SV +33DD ; mapped ; 0077 0062 # 1.1 SQUARE WB +33DE ; mapped ; 0076 2215 006D #4.0 SQUARE V OVER M +33DF ; mapped ; 0061 2215 006D #4.0 SQUARE A OVER M +33E0 ; mapped ; 0031 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY ONE +33E1 ; mapped ; 0032 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWO +33E2 ; mapped ; 0033 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THREE +33E3 ; mapped ; 0034 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FOUR +33E4 ; mapped ; 0035 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FIVE +33E5 ; mapped ; 0036 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SIX +33E6 ; mapped ; 0037 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SEVEN +33E7 ; mapped ; 0038 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY EIGHT +33E8 ; mapped ; 0039 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY NINE +33E9 ; mapped ; 0031 0030 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TEN +33EA ; mapped ; 0031 0031 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY ELEVEN +33EB ; mapped ; 0031 0032 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWELVE +33EC ; mapped ; 0031 0033 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTEEN +33ED ; mapped ; 0031 0034 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FOURTEEN +33EE ; mapped ; 0031 0035 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR 
[Unicode IDNA mapping table data (UTS #46 `IdnaMappingTable.txt` format, part of the vendored sources) continues here, one `codepoint(s) ; status [; mapping] [; NV8] # version  character name` record per line, from U+33EF through U+FC39: ideographic telegraph symbols, CJK unified and compatibility ideographs, Cyrillic and Latin Extended-D case mappings, Yi, Vai, Bamum, Cherokee small-letter mappings, Meetei Mayek, Hangul syllables, surrogate and private-use ranges (disallowed), and the Hebrew and Arabic presentation-form ligature mappings.]
LIGATURE KAF WITH HAH ISOLATED FORM +FC3A ; mapped ; 0643 062E # 1.1 ARABIC LIGATURE KAF WITH KHAH ISOLATED FORM +FC3B ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM ISOLATED FORM +FC3C ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM ISOLATED FORM +FC3D ; mapped ; 0643 0649 # 1.1 ARABIC LIGATURE KAF WITH ALEF MAKSURA ISOLATED FORM +FC3E ; mapped ; 0643 064A # 1.1 ARABIC LIGATURE KAF WITH YEH ISOLATED FORM +FC3F ; mapped ; 0644 062C # 1.1 ARABIC LIGATURE LAM WITH JEEM ISOLATED FORM +FC40 ; mapped ; 0644 062D # 1.1 ARABIC LIGATURE LAM WITH HAH ISOLATED FORM +FC41 ; mapped ; 0644 062E # 1.1 ARABIC LIGATURE LAM WITH KHAH ISOLATED FORM +FC42 ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM ISOLATED FORM +FC43 ; mapped ; 0644 0649 # 1.1 ARABIC LIGATURE LAM WITH ALEF MAKSURA ISOLATED FORM +FC44 ; mapped ; 0644 064A # 1.1 ARABIC LIGATURE LAM WITH YEH ISOLATED FORM +FC45 ; mapped ; 0645 062C # 1.1 ARABIC LIGATURE MEEM WITH JEEM ISOLATED FORM +FC46 ; mapped ; 0645 062D # 1.1 ARABIC LIGATURE MEEM WITH HAH ISOLATED FORM +FC47 ; mapped ; 0645 062E # 1.1 ARABIC LIGATURE MEEM WITH KHAH ISOLATED FORM +FC48 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM ISOLATED FORM +FC49 ; mapped ; 0645 0649 # 1.1 ARABIC LIGATURE MEEM WITH ALEF MAKSURA ISOLATED FORM +FC4A ; mapped ; 0645 064A # 1.1 ARABIC LIGATURE MEEM WITH YEH ISOLATED FORM +FC4B ; mapped ; 0646 062C # 1.1 ARABIC LIGATURE NOON WITH JEEM ISOLATED FORM +FC4C ; mapped ; 0646 062D # 1.1 ARABIC LIGATURE NOON WITH HAH ISOLATED FORM +FC4D ; mapped ; 0646 062E # 1.1 ARABIC LIGATURE NOON WITH KHAH ISOLATED FORM +FC4E ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM ISOLATED FORM +FC4F ; mapped ; 0646 0649 # 1.1 ARABIC LIGATURE NOON WITH ALEF MAKSURA ISOLATED FORM +FC50 ; mapped ; 0646 064A # 1.1 ARABIC LIGATURE NOON WITH YEH ISOLATED FORM +FC51 ; mapped ; 0647 062C # 1.1 ARABIC LIGATURE HEH WITH JEEM ISOLATED FORM +FC52 ; mapped ; 0647 0645 # 1.1 ARABIC LIGATURE HEH WITH MEEM ISOLATED FORM +FC53 ; mapped ; 0647 0649 # 1.1 ARABIC LIGATURE HEH WITH ALEF MAKSURA ISOLATED FORM +FC54 ; mapped ; 0647 064A # 1.1 ARABIC LIGATURE HEH WITH YEH ISOLATED FORM +FC55 ; mapped ; 064A 062C # 1.1 ARABIC LIGATURE YEH WITH JEEM ISOLATED FORM +FC56 ; mapped ; 064A 062D # 1.1 ARABIC LIGATURE YEH WITH HAH ISOLATED FORM +FC57 ; mapped ; 064A 062E # 1.1 ARABIC LIGATURE YEH WITH KHAH ISOLATED FORM +FC58 ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM ISOLATED FORM +FC59 ; mapped ; 064A 0649 # 1.1 ARABIC LIGATURE YEH WITH ALEF MAKSURA ISOLATED FORM +FC5A ; mapped ; 064A 064A # 1.1 ARABIC LIGATURE YEH WITH YEH ISOLATED FORM +FC5B ; mapped ; 0630 0670 # 1.1 ARABIC LIGATURE THAL WITH SUPERSCRIPT ALEF ISOLATED FORM +FC5C ; mapped ; 0631 0670 # 1.1 ARABIC LIGATURE REH WITH SUPERSCRIPT ALEF ISOLATED FORM +FC5D ; mapped ; 0649 0670 # 1.1 ARABIC LIGATURE ALEF MAKSURA WITH SUPERSCRIPT ALEF ISOLATED FORM +FC5E ; disallowed_STD3_mapped ; 0020 064C 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMATAN ISOLATED FORM +FC5F ; disallowed_STD3_mapped ; 0020 064D 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRATAN ISOLATED FORM +FC60 ; disallowed_STD3_mapped ; 0020 064E 0651 #1.1 ARABIC LIGATURE SHADDA WITH FATHA ISOLATED FORM +FC61 ; disallowed_STD3_mapped ; 0020 064F 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMA ISOLATED FORM +FC62 ; disallowed_STD3_mapped ; 0020 0650 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRA ISOLATED FORM +FC63 ; disallowed_STD3_mapped ; 0020 0651 0670 #1.1 ARABIC LIGATURE SHADDA WITH SUPERSCRIPT ALEF ISOLATED FORM +FC64 ; mapped ; 0626 0631 # 
1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH REH FINAL FORM +FC65 ; mapped ; 0626 0632 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ZAIN FINAL FORM +FC66 ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM FINAL FORM +FC67 ; mapped ; 0626 0646 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH NOON FINAL FORM +FC68 ; mapped ; 0626 0649 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA FINAL FORM +FC69 ; mapped ; 0626 064A # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YEH FINAL FORM +FC6A ; mapped ; 0628 0631 # 1.1 ARABIC LIGATURE BEH WITH REH FINAL FORM +FC6B ; mapped ; 0628 0632 # 1.1 ARABIC LIGATURE BEH WITH ZAIN FINAL FORM +FC6C ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM FINAL FORM +FC6D ; mapped ; 0628 0646 # 1.1 ARABIC LIGATURE BEH WITH NOON FINAL FORM +FC6E ; mapped ; 0628 0649 # 1.1 ARABIC LIGATURE BEH WITH ALEF MAKSURA FINAL FORM +FC6F ; mapped ; 0628 064A # 1.1 ARABIC LIGATURE BEH WITH YEH FINAL FORM +FC70 ; mapped ; 062A 0631 # 1.1 ARABIC LIGATURE TEH WITH REH FINAL FORM +FC71 ; mapped ; 062A 0632 # 1.1 ARABIC LIGATURE TEH WITH ZAIN FINAL FORM +FC72 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM FINAL FORM +FC73 ; mapped ; 062A 0646 # 1.1 ARABIC LIGATURE TEH WITH NOON FINAL FORM +FC74 ; mapped ; 062A 0649 # 1.1 ARABIC LIGATURE TEH WITH ALEF MAKSURA FINAL FORM +FC75 ; mapped ; 062A 064A # 1.1 ARABIC LIGATURE TEH WITH YEH FINAL FORM +FC76 ; mapped ; 062B 0631 # 1.1 ARABIC LIGATURE THEH WITH REH FINAL FORM +FC77 ; mapped ; 062B 0632 # 1.1 ARABIC LIGATURE THEH WITH ZAIN FINAL FORM +FC78 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM FINAL FORM +FC79 ; mapped ; 062B 0646 # 1.1 ARABIC LIGATURE THEH WITH NOON FINAL FORM +FC7A ; mapped ; 062B 0649 # 1.1 ARABIC LIGATURE THEH WITH ALEF MAKSURA FINAL FORM +FC7B ; mapped ; 062B 064A # 1.1 ARABIC LIGATURE THEH WITH YEH FINAL FORM +FC7C ; mapped ; 0641 0649 # 1.1 ARABIC LIGATURE FEH WITH ALEF MAKSURA FINAL FORM +FC7D ; mapped ; 0641 064A # 1.1 ARABIC LIGATURE FEH WITH YEH FINAL FORM +FC7E ; mapped ; 0642 0649 # 1.1 ARABIC LIGATURE QAF WITH ALEF MAKSURA FINAL FORM +FC7F ; mapped ; 0642 064A # 1.1 ARABIC LIGATURE QAF WITH YEH FINAL FORM +FC80 ; mapped ; 0643 0627 # 1.1 ARABIC LIGATURE KAF WITH ALEF FINAL FORM +FC81 ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM FINAL FORM +FC82 ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM FINAL FORM +FC83 ; mapped ; 0643 0649 # 1.1 ARABIC LIGATURE KAF WITH ALEF MAKSURA FINAL FORM +FC84 ; mapped ; 0643 064A # 1.1 ARABIC LIGATURE KAF WITH YEH FINAL FORM +FC85 ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM FINAL FORM +FC86 ; mapped ; 0644 0649 # 1.1 ARABIC LIGATURE LAM WITH ALEF MAKSURA FINAL FORM +FC87 ; mapped ; 0644 064A # 1.1 ARABIC LIGATURE LAM WITH YEH FINAL FORM +FC88 ; mapped ; 0645 0627 # 1.1 ARABIC LIGATURE MEEM WITH ALEF FINAL FORM +FC89 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM FINAL FORM +FC8A ; mapped ; 0646 0631 # 1.1 ARABIC LIGATURE NOON WITH REH FINAL FORM +FC8B ; mapped ; 0646 0632 # 1.1 ARABIC LIGATURE NOON WITH ZAIN FINAL FORM +FC8C ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM FINAL FORM +FC8D ; mapped ; 0646 0646 # 1.1 ARABIC LIGATURE NOON WITH NOON FINAL FORM +FC8E ; mapped ; 0646 0649 # 1.1 ARABIC LIGATURE NOON WITH ALEF MAKSURA FINAL FORM +FC8F ; mapped ; 0646 064A # 1.1 ARABIC LIGATURE NOON WITH YEH FINAL FORM +FC90 ; mapped ; 0649 0670 # 1.1 ARABIC LIGATURE ALEF MAKSURA WITH SUPERSCRIPT ALEF FINAL FORM +FC91 ; mapped ; 064A 0631 # 1.1 ARABIC LIGATURE YEH WITH 
REH FINAL FORM +FC92 ; mapped ; 064A 0632 # 1.1 ARABIC LIGATURE YEH WITH ZAIN FINAL FORM +FC93 ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM FINAL FORM +FC94 ; mapped ; 064A 0646 # 1.1 ARABIC LIGATURE YEH WITH NOON FINAL FORM +FC95 ; mapped ; 064A 0649 # 1.1 ARABIC LIGATURE YEH WITH ALEF MAKSURA FINAL FORM +FC96 ; mapped ; 064A 064A # 1.1 ARABIC LIGATURE YEH WITH YEH FINAL FORM +FC97 ; mapped ; 0626 062C # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH JEEM INITIAL FORM +FC98 ; mapped ; 0626 062D # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HAH INITIAL FORM +FC99 ; mapped ; 0626 062E # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH KHAH INITIAL FORM +FC9A ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM INITIAL FORM +FC9B ; mapped ; 0626 0647 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HEH INITIAL FORM +FC9C ; mapped ; 0628 062C # 1.1 ARABIC LIGATURE BEH WITH JEEM INITIAL FORM +FC9D ; mapped ; 0628 062D # 1.1 ARABIC LIGATURE BEH WITH HAH INITIAL FORM +FC9E ; mapped ; 0628 062E # 1.1 ARABIC LIGATURE BEH WITH KHAH INITIAL FORM +FC9F ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM INITIAL FORM +FCA0 ; mapped ; 0628 0647 # 1.1 ARABIC LIGATURE BEH WITH HEH INITIAL FORM +FCA1 ; mapped ; 062A 062C # 1.1 ARABIC LIGATURE TEH WITH JEEM INITIAL FORM +FCA2 ; mapped ; 062A 062D # 1.1 ARABIC LIGATURE TEH WITH HAH INITIAL FORM +FCA3 ; mapped ; 062A 062E # 1.1 ARABIC LIGATURE TEH WITH KHAH INITIAL FORM +FCA4 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM INITIAL FORM +FCA5 ; mapped ; 062A 0647 # 1.1 ARABIC LIGATURE TEH WITH HEH INITIAL FORM +FCA6 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM INITIAL FORM +FCA7 ; mapped ; 062C 062D # 1.1 ARABIC LIGATURE JEEM WITH HAH INITIAL FORM +FCA8 ; mapped ; 062C 0645 # 1.1 ARABIC LIGATURE JEEM WITH MEEM INITIAL FORM +FCA9 ; mapped ; 062D 062C # 1.1 ARABIC LIGATURE HAH WITH JEEM INITIAL FORM +FCAA ; mapped ; 062D 0645 # 1.1 ARABIC LIGATURE HAH WITH MEEM INITIAL FORM +FCAB ; mapped ; 062E 062C # 1.1 ARABIC LIGATURE KHAH WITH JEEM INITIAL FORM +FCAC ; mapped ; 062E 0645 # 1.1 ARABIC LIGATURE KHAH WITH MEEM INITIAL FORM +FCAD ; mapped ; 0633 062C # 1.1 ARABIC LIGATURE SEEN WITH JEEM INITIAL FORM +FCAE ; mapped ; 0633 062D # 1.1 ARABIC LIGATURE SEEN WITH HAH INITIAL FORM +FCAF ; mapped ; 0633 062E # 1.1 ARABIC LIGATURE SEEN WITH KHAH INITIAL FORM +FCB0 ; mapped ; 0633 0645 # 1.1 ARABIC LIGATURE SEEN WITH MEEM INITIAL FORM +FCB1 ; mapped ; 0635 062D # 1.1 ARABIC LIGATURE SAD WITH HAH INITIAL FORM +FCB2 ; mapped ; 0635 062E # 1.1 ARABIC LIGATURE SAD WITH KHAH INITIAL FORM +FCB3 ; mapped ; 0635 0645 # 1.1 ARABIC LIGATURE SAD WITH MEEM INITIAL FORM +FCB4 ; mapped ; 0636 062C # 1.1 ARABIC LIGATURE DAD WITH JEEM INITIAL FORM +FCB5 ; mapped ; 0636 062D # 1.1 ARABIC LIGATURE DAD WITH HAH INITIAL FORM +FCB6 ; mapped ; 0636 062E # 1.1 ARABIC LIGATURE DAD WITH KHAH INITIAL FORM +FCB7 ; mapped ; 0636 0645 # 1.1 ARABIC LIGATURE DAD WITH MEEM INITIAL FORM +FCB8 ; mapped ; 0637 062D # 1.1 ARABIC LIGATURE TAH WITH HAH INITIAL FORM +FCB9 ; mapped ; 0638 0645 # 1.1 ARABIC LIGATURE ZAH WITH MEEM INITIAL FORM +FCBA ; mapped ; 0639 062C # 1.1 ARABIC LIGATURE AIN WITH JEEM INITIAL FORM +FCBB ; mapped ; 0639 0645 # 1.1 ARABIC LIGATURE AIN WITH MEEM INITIAL FORM +FCBC ; mapped ; 063A 062C # 1.1 ARABIC LIGATURE GHAIN WITH JEEM INITIAL FORM +FCBD ; mapped ; 063A 0645 # 1.1 ARABIC LIGATURE GHAIN WITH MEEM INITIAL FORM +FCBE ; mapped ; 0641 062C # 1.1 ARABIC LIGATURE FEH WITH JEEM INITIAL FORM +FCBF ; mapped ; 0641 062D # 1.1 
ARABIC LIGATURE FEH WITH HAH INITIAL FORM +FCC0 ; mapped ; 0641 062E # 1.1 ARABIC LIGATURE FEH WITH KHAH INITIAL FORM +FCC1 ; mapped ; 0641 0645 # 1.1 ARABIC LIGATURE FEH WITH MEEM INITIAL FORM +FCC2 ; mapped ; 0642 062D # 1.1 ARABIC LIGATURE QAF WITH HAH INITIAL FORM +FCC3 ; mapped ; 0642 0645 # 1.1 ARABIC LIGATURE QAF WITH MEEM INITIAL FORM +FCC4 ; mapped ; 0643 062C # 1.1 ARABIC LIGATURE KAF WITH JEEM INITIAL FORM +FCC5 ; mapped ; 0643 062D # 1.1 ARABIC LIGATURE KAF WITH HAH INITIAL FORM +FCC6 ; mapped ; 0643 062E # 1.1 ARABIC LIGATURE KAF WITH KHAH INITIAL FORM +FCC7 ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM INITIAL FORM +FCC8 ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM INITIAL FORM +FCC9 ; mapped ; 0644 062C # 1.1 ARABIC LIGATURE LAM WITH JEEM INITIAL FORM +FCCA ; mapped ; 0644 062D # 1.1 ARABIC LIGATURE LAM WITH HAH INITIAL FORM +FCCB ; mapped ; 0644 062E # 1.1 ARABIC LIGATURE LAM WITH KHAH INITIAL FORM +FCCC ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM INITIAL FORM +FCCD ; mapped ; 0644 0647 # 1.1 ARABIC LIGATURE LAM WITH HEH INITIAL FORM +FCCE ; mapped ; 0645 062C # 1.1 ARABIC LIGATURE MEEM WITH JEEM INITIAL FORM +FCCF ; mapped ; 0645 062D # 1.1 ARABIC LIGATURE MEEM WITH HAH INITIAL FORM +FCD0 ; mapped ; 0645 062E # 1.1 ARABIC LIGATURE MEEM WITH KHAH INITIAL FORM +FCD1 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM INITIAL FORM +FCD2 ; mapped ; 0646 062C # 1.1 ARABIC LIGATURE NOON WITH JEEM INITIAL FORM +FCD3 ; mapped ; 0646 062D # 1.1 ARABIC LIGATURE NOON WITH HAH INITIAL FORM +FCD4 ; mapped ; 0646 062E # 1.1 ARABIC LIGATURE NOON WITH KHAH INITIAL FORM +FCD5 ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM INITIAL FORM +FCD6 ; mapped ; 0646 0647 # 1.1 ARABIC LIGATURE NOON WITH HEH INITIAL FORM +FCD7 ; mapped ; 0647 062C # 1.1 ARABIC LIGATURE HEH WITH JEEM INITIAL FORM +FCD8 ; mapped ; 0647 0645 # 1.1 ARABIC LIGATURE HEH WITH MEEM INITIAL FORM +FCD9 ; mapped ; 0647 0670 # 1.1 ARABIC LIGATURE HEH WITH SUPERSCRIPT ALEF INITIAL FORM +FCDA ; mapped ; 064A 062C # 1.1 ARABIC LIGATURE YEH WITH JEEM INITIAL FORM +FCDB ; mapped ; 064A 062D # 1.1 ARABIC LIGATURE YEH WITH HAH INITIAL FORM +FCDC ; mapped ; 064A 062E # 1.1 ARABIC LIGATURE YEH WITH KHAH INITIAL FORM +FCDD ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM INITIAL FORM +FCDE ; mapped ; 064A 0647 # 1.1 ARABIC LIGATURE YEH WITH HEH INITIAL FORM +FCDF ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM MEDIAL FORM +FCE0 ; mapped ; 0626 0647 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HEH MEDIAL FORM +FCE1 ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM MEDIAL FORM +FCE2 ; mapped ; 0628 0647 # 1.1 ARABIC LIGATURE BEH WITH HEH MEDIAL FORM +FCE3 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM MEDIAL FORM +FCE4 ; mapped ; 062A 0647 # 1.1 ARABIC LIGATURE TEH WITH HEH MEDIAL FORM +FCE5 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM MEDIAL FORM +FCE6 ; mapped ; 062B 0647 # 1.1 ARABIC LIGATURE THEH WITH HEH MEDIAL FORM +FCE7 ; mapped ; 0633 0645 # 1.1 ARABIC LIGATURE SEEN WITH MEEM MEDIAL FORM +FCE8 ; mapped ; 0633 0647 # 1.1 ARABIC LIGATURE SEEN WITH HEH MEDIAL FORM +FCE9 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM MEDIAL FORM +FCEA ; mapped ; 0634 0647 # 1.1 ARABIC LIGATURE SHEEN WITH HEH MEDIAL FORM +FCEB ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM MEDIAL FORM +FCEC ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM MEDIAL FORM +FCED ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH 
MEEM MEDIAL FORM +FCEE ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM MEDIAL FORM +FCEF ; mapped ; 0646 0647 # 1.1 ARABIC LIGATURE NOON WITH HEH MEDIAL FORM +FCF0 ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM MEDIAL FORM +FCF1 ; mapped ; 064A 0647 # 1.1 ARABIC LIGATURE YEH WITH HEH MEDIAL FORM +FCF2 ; mapped ; 0640 064E 0651 #1.1 ARABIC LIGATURE SHADDA WITH FATHA MEDIAL FORM +FCF3 ; mapped ; 0640 064F 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMA MEDIAL FORM +FCF4 ; mapped ; 0640 0650 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRA MEDIAL FORM +FCF5 ; mapped ; 0637 0649 # 1.1 ARABIC LIGATURE TAH WITH ALEF MAKSURA ISOLATED FORM +FCF6 ; mapped ; 0637 064A # 1.1 ARABIC LIGATURE TAH WITH YEH ISOLATED FORM +FCF7 ; mapped ; 0639 0649 # 1.1 ARABIC LIGATURE AIN WITH ALEF MAKSURA ISOLATED FORM +FCF8 ; mapped ; 0639 064A # 1.1 ARABIC LIGATURE AIN WITH YEH ISOLATED FORM +FCF9 ; mapped ; 063A 0649 # 1.1 ARABIC LIGATURE GHAIN WITH ALEF MAKSURA ISOLATED FORM +FCFA ; mapped ; 063A 064A # 1.1 ARABIC LIGATURE GHAIN WITH YEH ISOLATED FORM +FCFB ; mapped ; 0633 0649 # 1.1 ARABIC LIGATURE SEEN WITH ALEF MAKSURA ISOLATED FORM +FCFC ; mapped ; 0633 064A # 1.1 ARABIC LIGATURE SEEN WITH YEH ISOLATED FORM +FCFD ; mapped ; 0634 0649 # 1.1 ARABIC LIGATURE SHEEN WITH ALEF MAKSURA ISOLATED FORM +FCFE ; mapped ; 0634 064A # 1.1 ARABIC LIGATURE SHEEN WITH YEH ISOLATED FORM +FCFF ; mapped ; 062D 0649 # 1.1 ARABIC LIGATURE HAH WITH ALEF MAKSURA ISOLATED FORM +FD00 ; mapped ; 062D 064A # 1.1 ARABIC LIGATURE HAH WITH YEH ISOLATED FORM +FD01 ; mapped ; 062C 0649 # 1.1 ARABIC LIGATURE JEEM WITH ALEF MAKSURA ISOLATED FORM +FD02 ; mapped ; 062C 064A # 1.1 ARABIC LIGATURE JEEM WITH YEH ISOLATED FORM +FD03 ; mapped ; 062E 0649 # 1.1 ARABIC LIGATURE KHAH WITH ALEF MAKSURA ISOLATED FORM +FD04 ; mapped ; 062E 064A # 1.1 ARABIC LIGATURE KHAH WITH YEH ISOLATED FORM +FD05 ; mapped ; 0635 0649 # 1.1 ARABIC LIGATURE SAD WITH ALEF MAKSURA ISOLATED FORM +FD06 ; mapped ; 0635 064A # 1.1 ARABIC LIGATURE SAD WITH YEH ISOLATED FORM +FD07 ; mapped ; 0636 0649 # 1.1 ARABIC LIGATURE DAD WITH ALEF MAKSURA ISOLATED FORM +FD08 ; mapped ; 0636 064A # 1.1 ARABIC LIGATURE DAD WITH YEH ISOLATED FORM +FD09 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM ISOLATED FORM +FD0A ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH ISOLATED FORM +FD0B ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH ISOLATED FORM +FD0C ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM ISOLATED FORM +FD0D ; mapped ; 0634 0631 # 1.1 ARABIC LIGATURE SHEEN WITH REH ISOLATED FORM +FD0E ; mapped ; 0633 0631 # 1.1 ARABIC LIGATURE SEEN WITH REH ISOLATED FORM +FD0F ; mapped ; 0635 0631 # 1.1 ARABIC LIGATURE SAD WITH REH ISOLATED FORM +FD10 ; mapped ; 0636 0631 # 1.1 ARABIC LIGATURE DAD WITH REH ISOLATED FORM +FD11 ; mapped ; 0637 0649 # 1.1 ARABIC LIGATURE TAH WITH ALEF MAKSURA FINAL FORM +FD12 ; mapped ; 0637 064A # 1.1 ARABIC LIGATURE TAH WITH YEH FINAL FORM +FD13 ; mapped ; 0639 0649 # 1.1 ARABIC LIGATURE AIN WITH ALEF MAKSURA FINAL FORM +FD14 ; mapped ; 0639 064A # 1.1 ARABIC LIGATURE AIN WITH YEH FINAL FORM +FD15 ; mapped ; 063A 0649 # 1.1 ARABIC LIGATURE GHAIN WITH ALEF MAKSURA FINAL FORM +FD16 ; mapped ; 063A 064A # 1.1 ARABIC LIGATURE GHAIN WITH YEH FINAL FORM +FD17 ; mapped ; 0633 0649 # 1.1 ARABIC LIGATURE SEEN WITH ALEF MAKSURA FINAL FORM +FD18 ; mapped ; 0633 064A # 1.1 ARABIC LIGATURE SEEN WITH YEH FINAL FORM +FD19 ; mapped ; 0634 0649 # 1.1 ARABIC LIGATURE SHEEN WITH ALEF MAKSURA FINAL FORM +FD1A ; mapped ; 0634 064A # 1.1 
ARABIC LIGATURE SHEEN WITH YEH FINAL FORM +FD1B ; mapped ; 062D 0649 # 1.1 ARABIC LIGATURE HAH WITH ALEF MAKSURA FINAL FORM +FD1C ; mapped ; 062D 064A # 1.1 ARABIC LIGATURE HAH WITH YEH FINAL FORM +FD1D ; mapped ; 062C 0649 # 1.1 ARABIC LIGATURE JEEM WITH ALEF MAKSURA FINAL FORM +FD1E ; mapped ; 062C 064A # 1.1 ARABIC LIGATURE JEEM WITH YEH FINAL FORM +FD1F ; mapped ; 062E 0649 # 1.1 ARABIC LIGATURE KHAH WITH ALEF MAKSURA FINAL FORM +FD20 ; mapped ; 062E 064A # 1.1 ARABIC LIGATURE KHAH WITH YEH FINAL FORM +FD21 ; mapped ; 0635 0649 # 1.1 ARABIC LIGATURE SAD WITH ALEF MAKSURA FINAL FORM +FD22 ; mapped ; 0635 064A # 1.1 ARABIC LIGATURE SAD WITH YEH FINAL FORM +FD23 ; mapped ; 0636 0649 # 1.1 ARABIC LIGATURE DAD WITH ALEF MAKSURA FINAL FORM +FD24 ; mapped ; 0636 064A # 1.1 ARABIC LIGATURE DAD WITH YEH FINAL FORM +FD25 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM FINAL FORM +FD26 ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH FINAL FORM +FD27 ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH FINAL FORM +FD28 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM FINAL FORM +FD29 ; mapped ; 0634 0631 # 1.1 ARABIC LIGATURE SHEEN WITH REH FINAL FORM +FD2A ; mapped ; 0633 0631 # 1.1 ARABIC LIGATURE SEEN WITH REH FINAL FORM +FD2B ; mapped ; 0635 0631 # 1.1 ARABIC LIGATURE SAD WITH REH FINAL FORM +FD2C ; mapped ; 0636 0631 # 1.1 ARABIC LIGATURE DAD WITH REH FINAL FORM +FD2D ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM INITIAL FORM +FD2E ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH INITIAL FORM +FD2F ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH INITIAL FORM +FD30 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM INITIAL FORM +FD31 ; mapped ; 0633 0647 # 1.1 ARABIC LIGATURE SEEN WITH HEH INITIAL FORM +FD32 ; mapped ; 0634 0647 # 1.1 ARABIC LIGATURE SHEEN WITH HEH INITIAL FORM +FD33 ; mapped ; 0637 0645 # 1.1 ARABIC LIGATURE TAH WITH MEEM INITIAL FORM +FD34 ; mapped ; 0633 062C # 1.1 ARABIC LIGATURE SEEN WITH JEEM MEDIAL FORM +FD35 ; mapped ; 0633 062D # 1.1 ARABIC LIGATURE SEEN WITH HAH MEDIAL FORM +FD36 ; mapped ; 0633 062E # 1.1 ARABIC LIGATURE SEEN WITH KHAH MEDIAL FORM +FD37 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM MEDIAL FORM +FD38 ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH MEDIAL FORM +FD39 ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH MEDIAL FORM +FD3A ; mapped ; 0637 0645 # 1.1 ARABIC LIGATURE TAH WITH MEEM MEDIAL FORM +FD3B ; mapped ; 0638 0645 # 1.1 ARABIC LIGATURE ZAH WITH MEEM MEDIAL FORM +FD3C..FD3D ; mapped ; 0627 064B # 1.1 ARABIC LIGATURE ALEF WITH FATHATAN FINAL FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM +FD3E..FD3F ; valid ; ; NV8 # 1.1 ORNATE LEFT PARENTHESIS..ORNATE RIGHT PARENTHESIS +FD40..FD4F ; disallowed # NA .. 
+FD50 ; mapped ; 062A 062C 0645 #1.1 ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM +FD51..FD52 ; mapped ; 062A 062D 062C #1.1 ARABIC LIGATURE TEH WITH HAH WITH JEEM FINAL FORM..ARABIC LIGATURE TEH WITH HAH WITH JEEM INITIAL FORM +FD53 ; mapped ; 062A 062D 0645 #1.1 ARABIC LIGATURE TEH WITH HAH WITH MEEM INITIAL FORM +FD54 ; mapped ; 062A 062E 0645 #1.1 ARABIC LIGATURE TEH WITH KHAH WITH MEEM INITIAL FORM +FD55 ; mapped ; 062A 0645 062C #1.1 ARABIC LIGATURE TEH WITH MEEM WITH JEEM INITIAL FORM +FD56 ; mapped ; 062A 0645 062D #1.1 ARABIC LIGATURE TEH WITH MEEM WITH HAH INITIAL FORM +FD57 ; mapped ; 062A 0645 062E #1.1 ARABIC LIGATURE TEH WITH MEEM WITH KHAH INITIAL FORM +FD58..FD59 ; mapped ; 062C 0645 062D #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE JEEM WITH MEEM WITH HAH INITIAL FORM +FD5A ; mapped ; 062D 0645 064A #1.1 ARABIC LIGATURE HAH WITH MEEM WITH YEH FINAL FORM +FD5B ; mapped ; 062D 0645 0649 #1.1 ARABIC LIGATURE HAH WITH MEEM WITH ALEF MAKSURA FINAL FORM +FD5C ; mapped ; 0633 062D 062C #1.1 ARABIC LIGATURE SEEN WITH HAH WITH JEEM INITIAL FORM +FD5D ; mapped ; 0633 062C 062D #1.1 ARABIC LIGATURE SEEN WITH JEEM WITH HAH INITIAL FORM +FD5E ; mapped ; 0633 062C 0649 #1.1 ARABIC LIGATURE SEEN WITH JEEM WITH ALEF MAKSURA FINAL FORM +FD5F..FD60 ; mapped ; 0633 0645 062D #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE SEEN WITH MEEM WITH HAH INITIAL FORM +FD61 ; mapped ; 0633 0645 062C #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH JEEM INITIAL FORM +FD62..FD63 ; mapped ; 0633 0645 0645 #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE SEEN WITH MEEM WITH MEEM INITIAL FORM +FD64..FD65 ; mapped ; 0635 062D 062D #1.1 ARABIC LIGATURE SAD WITH HAH WITH HAH FINAL FORM..ARABIC LIGATURE SAD WITH HAH WITH HAH INITIAL FORM +FD66 ; mapped ; 0635 0645 0645 #1.1 ARABIC LIGATURE SAD WITH MEEM WITH MEEM FINAL FORM +FD67..FD68 ; mapped ; 0634 062D 0645 #1.1 ARABIC LIGATURE SHEEN WITH HAH WITH MEEM FINAL FORM..ARABIC LIGATURE SHEEN WITH HAH WITH MEEM INITIAL FORM +FD69 ; mapped ; 0634 062C 064A #1.1 ARABIC LIGATURE SHEEN WITH JEEM WITH YEH FINAL FORM +FD6A..FD6B ; mapped ; 0634 0645 062E #1.1 ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH FINAL FORM..ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH INITIAL FORM +FD6C..FD6D ; mapped ; 0634 0645 0645 #1.1 ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM INITIAL FORM +FD6E ; mapped ; 0636 062D 0649 #1.1 ARABIC LIGATURE DAD WITH HAH WITH ALEF MAKSURA FINAL FORM +FD6F..FD70 ; mapped ; 0636 062E 0645 #1.1 ARABIC LIGATURE DAD WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE DAD WITH KHAH WITH MEEM INITIAL FORM +FD71..FD72 ; mapped ; 0637 0645 062D #1.1 ARABIC LIGATURE TAH WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE TAH WITH MEEM WITH HAH INITIAL FORM +FD73 ; mapped ; 0637 0645 0645 #1.1 ARABIC LIGATURE TAH WITH MEEM WITH MEEM INITIAL FORM +FD74 ; mapped ; 0637 0645 064A #1.1 ARABIC LIGATURE TAH WITH MEEM WITH YEH FINAL FORM +FD75 ; mapped ; 0639 062C 0645 #1.1 ARABIC LIGATURE AIN WITH JEEM WITH MEEM FINAL FORM +FD76..FD77 ; mapped ; 0639 0645 0645 #1.1 ARABIC LIGATURE AIN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE AIN WITH MEEM WITH MEEM INITIAL FORM +FD78 ; mapped ; 0639 0645 0649 #1.1 ARABIC LIGATURE AIN WITH MEEM WITH ALEF MAKSURA FINAL FORM +FD79 ; mapped ; 063A 0645 0645 #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH MEEM FINAL FORM +FD7A ; mapped ; 063A 0645 064A #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH YEH FINAL FORM +FD7B ; mapped ; 063A 
0645 0649 #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH ALEF MAKSURA FINAL FORM +FD7C..FD7D ; mapped ; 0641 062E 0645 #1.1 ARABIC LIGATURE FEH WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE FEH WITH KHAH WITH MEEM INITIAL FORM +FD7E ; mapped ; 0642 0645 062D #1.1 ARABIC LIGATURE QAF WITH MEEM WITH HAH FINAL FORM +FD7F ; mapped ; 0642 0645 0645 #1.1 ARABIC LIGATURE QAF WITH MEEM WITH MEEM FINAL FORM +FD80 ; mapped ; 0644 062D 0645 #1.1 ARABIC LIGATURE LAM WITH HAH WITH MEEM FINAL FORM +FD81 ; mapped ; 0644 062D 064A #1.1 ARABIC LIGATURE LAM WITH HAH WITH YEH FINAL FORM +FD82 ; mapped ; 0644 062D 0649 #1.1 ARABIC LIGATURE LAM WITH HAH WITH ALEF MAKSURA FINAL FORM +FD83..FD84 ; mapped ; 0644 062C 062C #1.1 ARABIC LIGATURE LAM WITH JEEM WITH JEEM INITIAL FORM..ARABIC LIGATURE LAM WITH JEEM WITH JEEM FINAL FORM +FD85..FD86 ; mapped ; 0644 062E 0645 #1.1 ARABIC LIGATURE LAM WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE LAM WITH KHAH WITH MEEM INITIAL FORM +FD87..FD88 ; mapped ; 0644 0645 062D #1.1 ARABIC LIGATURE LAM WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE LAM WITH MEEM WITH HAH INITIAL FORM +FD89 ; mapped ; 0645 062D 062C #1.1 ARABIC LIGATURE MEEM WITH HAH WITH JEEM INITIAL FORM +FD8A ; mapped ; 0645 062D 0645 #1.1 ARABIC LIGATURE MEEM WITH HAH WITH MEEM INITIAL FORM +FD8B ; mapped ; 0645 062D 064A #1.1 ARABIC LIGATURE MEEM WITH HAH WITH YEH FINAL FORM +FD8C ; mapped ; 0645 062C 062D #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH HAH INITIAL FORM +FD8D ; mapped ; 0645 062C 0645 #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH MEEM INITIAL FORM +FD8E ; mapped ; 0645 062E 062C #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH JEEM INITIAL FORM +FD8F ; mapped ; 0645 062E 0645 #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM +FD90..FD91 ; disallowed # NA .. +FD92 ; mapped ; 0645 062C 062E #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM +FD93 ; mapped ; 0647 0645 062C #1.1 ARABIC LIGATURE HEH WITH MEEM WITH JEEM INITIAL FORM +FD94 ; mapped ; 0647 0645 0645 #1.1 ARABIC LIGATURE HEH WITH MEEM WITH MEEM INITIAL FORM +FD95 ; mapped ; 0646 062D 0645 #1.1 ARABIC LIGATURE NOON WITH HAH WITH MEEM INITIAL FORM +FD96 ; mapped ; 0646 062D 0649 #1.1 ARABIC LIGATURE NOON WITH HAH WITH ALEF MAKSURA FINAL FORM +FD97..FD98 ; mapped ; 0646 062C 0645 #1.1 ARABIC LIGATURE NOON WITH JEEM WITH MEEM FINAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH MEEM INITIAL FORM +FD99 ; mapped ; 0646 062C 0649 #1.1 ARABIC LIGATURE NOON WITH JEEM WITH ALEF MAKSURA FINAL FORM +FD9A ; mapped ; 0646 0645 064A #1.1 ARABIC LIGATURE NOON WITH MEEM WITH YEH FINAL FORM +FD9B ; mapped ; 0646 0645 0649 #1.1 ARABIC LIGATURE NOON WITH MEEM WITH ALEF MAKSURA FINAL FORM +FD9C..FD9D ; mapped ; 064A 0645 0645 #1.1 ARABIC LIGATURE YEH WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE YEH WITH MEEM WITH MEEM INITIAL FORM +FD9E ; mapped ; 0628 062E 064A #1.1 ARABIC LIGATURE BEH WITH KHAH WITH YEH FINAL FORM +FD9F ; mapped ; 062A 062C 064A #1.1 ARABIC LIGATURE TEH WITH JEEM WITH YEH FINAL FORM +FDA0 ; mapped ; 062A 062C 0649 #1.1 ARABIC LIGATURE TEH WITH JEEM WITH ALEF MAKSURA FINAL FORM +FDA1 ; mapped ; 062A 062E 064A #1.1 ARABIC LIGATURE TEH WITH KHAH WITH YEH FINAL FORM +FDA2 ; mapped ; 062A 062E 0649 #1.1 ARABIC LIGATURE TEH WITH KHAH WITH ALEF MAKSURA FINAL FORM +FDA3 ; mapped ; 062A 0645 064A #1.1 ARABIC LIGATURE TEH WITH MEEM WITH YEH FINAL FORM +FDA4 ; mapped ; 062A 0645 0649 #1.1 ARABIC LIGATURE TEH WITH MEEM WITH ALEF MAKSURA FINAL FORM +FDA5 ; mapped ; 062C 0645 064A #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH YEH FINAL FORM +FDA6 ; mapped ; 062C 
062D 0649 #1.1 ARABIC LIGATURE JEEM WITH HAH WITH ALEF MAKSURA FINAL FORM +FDA7 ; mapped ; 062C 0645 0649 #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH ALEF MAKSURA FINAL FORM +FDA8 ; mapped ; 0633 062E 0649 #1.1 ARABIC LIGATURE SEEN WITH KHAH WITH ALEF MAKSURA FINAL FORM +FDA9 ; mapped ; 0635 062D 064A #1.1 ARABIC LIGATURE SAD WITH HAH WITH YEH FINAL FORM +FDAA ; mapped ; 0634 062D 064A #1.1 ARABIC LIGATURE SHEEN WITH HAH WITH YEH FINAL FORM +FDAB ; mapped ; 0636 062D 064A #1.1 ARABIC LIGATURE DAD WITH HAH WITH YEH FINAL FORM +FDAC ; mapped ; 0644 062C 064A #1.1 ARABIC LIGATURE LAM WITH JEEM WITH YEH FINAL FORM +FDAD ; mapped ; 0644 0645 064A #1.1 ARABIC LIGATURE LAM WITH MEEM WITH YEH FINAL FORM +FDAE ; mapped ; 064A 062D 064A #1.1 ARABIC LIGATURE YEH WITH HAH WITH YEH FINAL FORM +FDAF ; mapped ; 064A 062C 064A #1.1 ARABIC LIGATURE YEH WITH JEEM WITH YEH FINAL FORM +FDB0 ; mapped ; 064A 0645 064A #1.1 ARABIC LIGATURE YEH WITH MEEM WITH YEH FINAL FORM +FDB1 ; mapped ; 0645 0645 064A #1.1 ARABIC LIGATURE MEEM WITH MEEM WITH YEH FINAL FORM +FDB2 ; mapped ; 0642 0645 064A #1.1 ARABIC LIGATURE QAF WITH MEEM WITH YEH FINAL FORM +FDB3 ; mapped ; 0646 062D 064A #1.1 ARABIC LIGATURE NOON WITH HAH WITH YEH FINAL FORM +FDB4 ; mapped ; 0642 0645 062D #1.1 ARABIC LIGATURE QAF WITH MEEM WITH HAH INITIAL FORM +FDB5 ; mapped ; 0644 062D 0645 #1.1 ARABIC LIGATURE LAM WITH HAH WITH MEEM INITIAL FORM +FDB6 ; mapped ; 0639 0645 064A #1.1 ARABIC LIGATURE AIN WITH MEEM WITH YEH FINAL FORM +FDB7 ; mapped ; 0643 0645 064A #1.1 ARABIC LIGATURE KAF WITH MEEM WITH YEH FINAL FORM +FDB8 ; mapped ; 0646 062C 062D #1.1 ARABIC LIGATURE NOON WITH JEEM WITH HAH INITIAL FORM +FDB9 ; mapped ; 0645 062E 064A #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH YEH FINAL FORM +FDBA ; mapped ; 0644 062C 0645 #1.1 ARABIC LIGATURE LAM WITH JEEM WITH MEEM INITIAL FORM +FDBB ; mapped ; 0643 0645 0645 #1.1 ARABIC LIGATURE KAF WITH MEEM WITH MEEM FINAL FORM +FDBC ; mapped ; 0644 062C 0645 #1.1 ARABIC LIGATURE LAM WITH JEEM WITH MEEM FINAL FORM +FDBD ; mapped ; 0646 062C 062D #1.1 ARABIC LIGATURE NOON WITH JEEM WITH HAH FINAL FORM +FDBE ; mapped ; 062C 062D 064A #1.1 ARABIC LIGATURE JEEM WITH HAH WITH YEH FINAL FORM +FDBF ; mapped ; 062D 062C 064A #1.1 ARABIC LIGATURE HAH WITH JEEM WITH YEH FINAL FORM +FDC0 ; mapped ; 0645 062C 064A #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH YEH FINAL FORM +FDC1 ; mapped ; 0641 0645 064A #1.1 ARABIC LIGATURE FEH WITH MEEM WITH YEH FINAL FORM +FDC2 ; mapped ; 0628 062D 064A #1.1 ARABIC LIGATURE BEH WITH HAH WITH YEH FINAL FORM +FDC3 ; mapped ; 0643 0645 0645 #1.1 ARABIC LIGATURE KAF WITH MEEM WITH MEEM INITIAL FORM +FDC4 ; mapped ; 0639 062C 0645 #1.1 ARABIC LIGATURE AIN WITH JEEM WITH MEEM INITIAL FORM +FDC5 ; mapped ; 0635 0645 0645 #1.1 ARABIC LIGATURE SAD WITH MEEM WITH MEEM INITIAL FORM +FDC6 ; mapped ; 0633 062E 064A #1.1 ARABIC LIGATURE SEEN WITH KHAH WITH YEH FINAL FORM +FDC7 ; mapped ; 0646 062C 064A #1.1 ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM +FDC8..FDCF ; disallowed # NA .. +FDD0..FDEF ; disallowed # 3.1 .. 
+FDF0 ; mapped ; 0635 0644 06D2 #1.1 ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM +FDF1 ; mapped ; 0642 0644 06D2 #1.1 ARABIC LIGATURE QALA USED AS KORANIC STOP SIGN ISOLATED FORM +FDF2 ; mapped ; 0627 0644 0644 0647 #1.1 ARABIC LIGATURE ALLAH ISOLATED FORM +FDF3 ; mapped ; 0627 0643 0628 0631 #1.1 ARABIC LIGATURE AKBAR ISOLATED FORM +FDF4 ; mapped ; 0645 062D 0645 062F #1.1 ARABIC LIGATURE MOHAMMAD ISOLATED FORM +FDF5 ; mapped ; 0635 0644 0639 0645 #1.1 ARABIC LIGATURE SALAM ISOLATED FORM +FDF6 ; mapped ; 0631 0633 0648 0644 #1.1 ARABIC LIGATURE RASOUL ISOLATED FORM +FDF7 ; mapped ; 0639 0644 064A 0647 #1.1 ARABIC LIGATURE ALAYHE ISOLATED FORM +FDF8 ; mapped ; 0648 0633 0644 0645 #1.1 ARABIC LIGATURE WASALLAM ISOLATED FORM +FDF9 ; mapped ; 0635 0644 0649 #1.1 ARABIC LIGATURE SALLA ISOLATED FORM +FDFA ; disallowed_STD3_mapped ; 0635 0644 0649 0020 0627 0644 0644 0647 0020 0639 0644 064A 0647 0020 0648 0633 0644 0645 #1.1 ARABIC LIGATURE SALLALLAHOU ALAYHE WASALLAM +FDFB ; disallowed_STD3_mapped ; 062C 0644 0020 062C 0644 0627 0644 0647 #1.1 ARABIC LIGATURE JALLAJALALOUHOU +FDFC ; mapped ; 0631 06CC 0627 0644 #3.2 RIAL SIGN +FDFD ; valid ; ; NV8 # 4.0 ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM +FDFE..FDFF ; disallowed # NA .. +FE00..FE0F ; ignored # 3.2 VARIATION SELECTOR-1..VARIATION SELECTOR-16 +FE10 ; disallowed_STD3_mapped ; 002C # 4.1 PRESENTATION FORM FOR VERTICAL COMMA +FE11 ; mapped ; 3001 # 4.1 PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC COMMA +FE12 ; disallowed # 4.1 PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC FULL STOP +FE13 ; disallowed_STD3_mapped ; 003A # 4.1 PRESENTATION FORM FOR VERTICAL COLON +FE14 ; disallowed_STD3_mapped ; 003B # 4.1 PRESENTATION FORM FOR VERTICAL SEMICOLON +FE15 ; disallowed_STD3_mapped ; 0021 # 4.1 PRESENTATION FORM FOR VERTICAL EXCLAMATION MARK +FE16 ; disallowed_STD3_mapped ; 003F # 4.1 PRESENTATION FORM FOR VERTICAL QUESTION MARK +FE17 ; mapped ; 3016 # 4.1 PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET +FE18 ; mapped ; 3017 # 4.1 PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRAKCET +FE19 ; disallowed # 4.1 PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS +FE1A..FE1F ; disallowed # NA .. 
+FE20..FE23 ; valid # 1.1 COMBINING LIGATURE LEFT HALF..COMBINING DOUBLE TILDE RIGHT HALF +FE24..FE26 ; valid # 5.1 COMBINING MACRON LEFT HALF..COMBINING CONJOINING MACRON +FE27..FE2D ; valid # 7.0 COMBINING LIGATURE LEFT HALF BELOW..COMBINING CONJOINING MACRON BELOW +FE2E..FE2F ; valid # 8.0 COMBINING CYRILLIC TITLO LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF +FE30 ; disallowed # 1.1 PRESENTATION FORM FOR VERTICAL TWO DOT LEADER +FE31 ; mapped ; 2014 # 1.1 PRESENTATION FORM FOR VERTICAL EM DASH +FE32 ; mapped ; 2013 # 1.1 PRESENTATION FORM FOR VERTICAL EN DASH +FE33..FE34 ; disallowed_STD3_mapped ; 005F # 1.1 PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE +FE35 ; disallowed_STD3_mapped ; 0028 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS +FE36 ; disallowed_STD3_mapped ; 0029 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS +FE37 ; disallowed_STD3_mapped ; 007B # 1.1 PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET +FE38 ; disallowed_STD3_mapped ; 007D # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET +FE39 ; mapped ; 3014 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET +FE3A ; mapped ; 3015 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET +FE3B ; mapped ; 3010 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET +FE3C ; mapped ; 3011 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET +FE3D ; mapped ; 300A # 1.1 PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET +FE3E ; mapped ; 300B # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET +FE3F ; mapped ; 3008 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET +FE40 ; mapped ; 3009 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET +FE41 ; mapped ; 300C # 1.1 PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET +FE42 ; mapped ; 300D # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET +FE43 ; mapped ; 300E # 1.1 PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET +FE44 ; mapped ; 300F # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET +FE45..FE46 ; valid ; ; NV8 # 3.2 SESAME DOT..WHITE SESAME DOT +FE47 ; disallowed_STD3_mapped ; 005B # 4.0 PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET +FE48 ; disallowed_STD3_mapped ; 005D # 4.0 PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET +FE49..FE4C ; disallowed_STD3_mapped ; 0020 0305 # 1.1 DASHED OVERLINE..DOUBLE WAVY OVERLINE +FE4D..FE4F ; disallowed_STD3_mapped ; 005F # 1.1 DASHED LOW LINE..WAVY LOW LINE +FE50 ; disallowed_STD3_mapped ; 002C # 1.1 SMALL COMMA +FE51 ; mapped ; 3001 # 1.1 SMALL IDEOGRAPHIC COMMA +FE52 ; disallowed # 1.1 SMALL FULL STOP +FE53 ; disallowed # NA +FE54 ; disallowed_STD3_mapped ; 003B # 1.1 SMALL SEMICOLON +FE55 ; disallowed_STD3_mapped ; 003A # 1.1 SMALL COLON +FE56 ; disallowed_STD3_mapped ; 003F # 1.1 SMALL QUESTION MARK +FE57 ; disallowed_STD3_mapped ; 0021 # 1.1 SMALL EXCLAMATION MARK +FE58 ; mapped ; 2014 # 1.1 SMALL EM DASH +FE59 ; disallowed_STD3_mapped ; 0028 # 1.1 SMALL LEFT PARENTHESIS +FE5A ; disallowed_STD3_mapped ; 0029 # 1.1 SMALL RIGHT PARENTHESIS +FE5B ; disallowed_STD3_mapped ; 007B # 1.1 SMALL LEFT CURLY BRACKET +FE5C ; disallowed_STD3_mapped ; 007D # 1.1 SMALL RIGHT CURLY BRACKET +FE5D ; mapped ; 3014 # 1.1 SMALL LEFT TORTOISE SHELL BRACKET +FE5E ; mapped ; 3015 # 1.1 SMALL RIGHT TORTOISE SHELL BRACKET +FE5F ; disallowed_STD3_mapped ; 0023 # 1.1 SMALL NUMBER SIGN +FE60 ; disallowed_STD3_mapped ; 0026 # 1.1 SMALL AMPERSAND +FE61 ; disallowed_STD3_mapped ; 002A # 1.1 SMALL 
ASTERISK +FE62 ; disallowed_STD3_mapped ; 002B # 1.1 SMALL PLUS SIGN +FE63 ; mapped ; 002D # 1.1 SMALL HYPHEN-MINUS +FE64 ; disallowed_STD3_mapped ; 003C # 1.1 SMALL LESS-THAN SIGN +FE65 ; disallowed_STD3_mapped ; 003E # 1.1 SMALL GREATER-THAN SIGN +FE66 ; disallowed_STD3_mapped ; 003D # 1.1 SMALL EQUALS SIGN +FE67 ; disallowed # NA +FE68 ; disallowed_STD3_mapped ; 005C # 1.1 SMALL REVERSE SOLIDUS +FE69 ; disallowed_STD3_mapped ; 0024 # 1.1 SMALL DOLLAR SIGN +FE6A ; disallowed_STD3_mapped ; 0025 # 1.1 SMALL PERCENT SIGN +FE6B ; disallowed_STD3_mapped ; 0040 # 1.1 SMALL COMMERCIAL AT +FE6C..FE6F ; disallowed # NA .. +FE70 ; disallowed_STD3_mapped ; 0020 064B # 1.1 ARABIC FATHATAN ISOLATED FORM +FE71 ; mapped ; 0640 064B # 1.1 ARABIC TATWEEL WITH FATHATAN ABOVE +FE72 ; disallowed_STD3_mapped ; 0020 064C # 1.1 ARABIC DAMMATAN ISOLATED FORM +FE73 ; valid # 3.2 ARABIC TAIL FRAGMENT +FE74 ; disallowed_STD3_mapped ; 0020 064D # 1.1 ARABIC KASRATAN ISOLATED FORM +FE75 ; disallowed # NA +FE76 ; disallowed_STD3_mapped ; 0020 064E # 1.1 ARABIC FATHA ISOLATED FORM +FE77 ; mapped ; 0640 064E # 1.1 ARABIC FATHA MEDIAL FORM +FE78 ; disallowed_STD3_mapped ; 0020 064F # 1.1 ARABIC DAMMA ISOLATED FORM +FE79 ; mapped ; 0640 064F # 1.1 ARABIC DAMMA MEDIAL FORM +FE7A ; disallowed_STD3_mapped ; 0020 0650 # 1.1 ARABIC KASRA ISOLATED FORM +FE7B ; mapped ; 0640 0650 # 1.1 ARABIC KASRA MEDIAL FORM +FE7C ; disallowed_STD3_mapped ; 0020 0651 # 1.1 ARABIC SHADDA ISOLATED FORM +FE7D ; mapped ; 0640 0651 # 1.1 ARABIC SHADDA MEDIAL FORM +FE7E ; disallowed_STD3_mapped ; 0020 0652 # 1.1 ARABIC SUKUN ISOLATED FORM +FE7F ; mapped ; 0640 0652 # 1.1 ARABIC SUKUN MEDIAL FORM +FE80 ; mapped ; 0621 # 1.1 ARABIC LETTER HAMZA ISOLATED FORM +FE81..FE82 ; mapped ; 0622 # 1.1 ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM..ARABIC LETTER ALEF WITH MADDA ABOVE FINAL FORM +FE83..FE84 ; mapped ; 0623 # 1.1 ARABIC LETTER ALEF WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER ALEF WITH HAMZA ABOVE FINAL FORM +FE85..FE86 ; mapped ; 0624 # 1.1 ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER WAW WITH HAMZA ABOVE FINAL FORM +FE87..FE88 ; mapped ; 0625 # 1.1 ARABIC LETTER ALEF WITH HAMZA BELOW ISOLATED FORM..ARABIC LETTER ALEF WITH HAMZA BELOW FINAL FORM +FE89..FE8C ; mapped ; 0626 # 1.1 ARABIC LETTER YEH WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER YEH WITH HAMZA ABOVE MEDIAL FORM +FE8D..FE8E ; mapped ; 0627 # 1.1 ARABIC LETTER ALEF ISOLATED FORM..ARABIC LETTER ALEF FINAL FORM +FE8F..FE92 ; mapped ; 0628 # 1.1 ARABIC LETTER BEH ISOLATED FORM..ARABIC LETTER BEH MEDIAL FORM +FE93..FE94 ; mapped ; 0629 # 1.1 ARABIC LETTER TEH MARBUTA ISOLATED FORM..ARABIC LETTER TEH MARBUTA FINAL FORM +FE95..FE98 ; mapped ; 062A # 1.1 ARABIC LETTER TEH ISOLATED FORM..ARABIC LETTER TEH MEDIAL FORM +FE99..FE9C ; mapped ; 062B # 1.1 ARABIC LETTER THEH ISOLATED FORM..ARABIC LETTER THEH MEDIAL FORM +FE9D..FEA0 ; mapped ; 062C # 1.1 ARABIC LETTER JEEM ISOLATED FORM..ARABIC LETTER JEEM MEDIAL FORM +FEA1..FEA4 ; mapped ; 062D # 1.1 ARABIC LETTER HAH ISOLATED FORM..ARABIC LETTER HAH MEDIAL FORM +FEA5..FEA8 ; mapped ; 062E # 1.1 ARABIC LETTER KHAH ISOLATED FORM..ARABIC LETTER KHAH MEDIAL FORM +FEA9..FEAA ; mapped ; 062F # 1.1 ARABIC LETTER DAL ISOLATED FORM..ARABIC LETTER DAL FINAL FORM +FEAB..FEAC ; mapped ; 0630 # 1.1 ARABIC LETTER THAL ISOLATED FORM..ARABIC LETTER THAL FINAL FORM +FEAD..FEAE ; mapped ; 0631 # 1.1 ARABIC LETTER REH ISOLATED FORM..ARABIC LETTER REH FINAL FORM +FEAF..FEB0 ; mapped ; 0632 # 1.1 ARABIC LETTER ZAIN ISOLATED FORM..ARABIC LETTER 
ZAIN FINAL FORM +FEB1..FEB4 ; mapped ; 0633 # 1.1 ARABIC LETTER SEEN ISOLATED FORM..ARABIC LETTER SEEN MEDIAL FORM +FEB5..FEB8 ; mapped ; 0634 # 1.1 ARABIC LETTER SHEEN ISOLATED FORM..ARABIC LETTER SHEEN MEDIAL FORM +FEB9..FEBC ; mapped ; 0635 # 1.1 ARABIC LETTER SAD ISOLATED FORM..ARABIC LETTER SAD MEDIAL FORM +FEBD..FEC0 ; mapped ; 0636 # 1.1 ARABIC LETTER DAD ISOLATED FORM..ARABIC LETTER DAD MEDIAL FORM +FEC1..FEC4 ; mapped ; 0637 # 1.1 ARABIC LETTER TAH ISOLATED FORM..ARABIC LETTER TAH MEDIAL FORM +FEC5..FEC8 ; mapped ; 0638 # 1.1 ARABIC LETTER ZAH ISOLATED FORM..ARABIC LETTER ZAH MEDIAL FORM +FEC9..FECC ; mapped ; 0639 # 1.1 ARABIC LETTER AIN ISOLATED FORM..ARABIC LETTER AIN MEDIAL FORM +FECD..FED0 ; mapped ; 063A # 1.1 ARABIC LETTER GHAIN ISOLATED FORM..ARABIC LETTER GHAIN MEDIAL FORM +FED1..FED4 ; mapped ; 0641 # 1.1 ARABIC LETTER FEH ISOLATED FORM..ARABIC LETTER FEH MEDIAL FORM +FED5..FED8 ; mapped ; 0642 # 1.1 ARABIC LETTER QAF ISOLATED FORM..ARABIC LETTER QAF MEDIAL FORM +FED9..FEDC ; mapped ; 0643 # 1.1 ARABIC LETTER KAF ISOLATED FORM..ARABIC LETTER KAF MEDIAL FORM +FEDD..FEE0 ; mapped ; 0644 # 1.1 ARABIC LETTER LAM ISOLATED FORM..ARABIC LETTER LAM MEDIAL FORM +FEE1..FEE4 ; mapped ; 0645 # 1.1 ARABIC LETTER MEEM ISOLATED FORM..ARABIC LETTER MEEM MEDIAL FORM +FEE5..FEE8 ; mapped ; 0646 # 1.1 ARABIC LETTER NOON ISOLATED FORM..ARABIC LETTER NOON MEDIAL FORM +FEE9..FEEC ; mapped ; 0647 # 1.1 ARABIC LETTER HEH ISOLATED FORM..ARABIC LETTER HEH MEDIAL FORM +FEED..FEEE ; mapped ; 0648 # 1.1 ARABIC LETTER WAW ISOLATED FORM..ARABIC LETTER WAW FINAL FORM +FEEF..FEF0 ; mapped ; 0649 # 1.1 ARABIC LETTER ALEF MAKSURA ISOLATED FORM..ARABIC LETTER ALEF MAKSURA FINAL FORM +FEF1..FEF4 ; mapped ; 064A # 1.1 ARABIC LETTER YEH ISOLATED FORM..ARABIC LETTER YEH MEDIAL FORM +FEF5..FEF6 ; mapped ; 0644 0622 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE FINAL FORM +FEF7..FEF8 ; mapped ; 0644 0623 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE FINAL FORM +FEF9..FEFA ; mapped ; 0644 0625 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW FINAL FORM +FEFB..FEFC ; mapped ; 0644 0627 # 1.1 ARABIC LIGATURE LAM WITH ALEF ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM +FEFD..FEFE ; disallowed # NA .. 
+FEFF ; ignored # 1.1 ZERO WIDTH NO-BREAK SPACE +FF00 ; disallowed # NA +FF01 ; disallowed_STD3_mapped ; 0021 # 1.1 FULLWIDTH EXCLAMATION MARK +FF02 ; disallowed_STD3_mapped ; 0022 # 1.1 FULLWIDTH QUOTATION MARK +FF03 ; disallowed_STD3_mapped ; 0023 # 1.1 FULLWIDTH NUMBER SIGN +FF04 ; disallowed_STD3_mapped ; 0024 # 1.1 FULLWIDTH DOLLAR SIGN +FF05 ; disallowed_STD3_mapped ; 0025 # 1.1 FULLWIDTH PERCENT SIGN +FF06 ; disallowed_STD3_mapped ; 0026 # 1.1 FULLWIDTH AMPERSAND +FF07 ; disallowed_STD3_mapped ; 0027 # 1.1 FULLWIDTH APOSTROPHE +FF08 ; disallowed_STD3_mapped ; 0028 # 1.1 FULLWIDTH LEFT PARENTHESIS +FF09 ; disallowed_STD3_mapped ; 0029 # 1.1 FULLWIDTH RIGHT PARENTHESIS +FF0A ; disallowed_STD3_mapped ; 002A # 1.1 FULLWIDTH ASTERISK +FF0B ; disallowed_STD3_mapped ; 002B # 1.1 FULLWIDTH PLUS SIGN +FF0C ; disallowed_STD3_mapped ; 002C # 1.1 FULLWIDTH COMMA +FF0D ; mapped ; 002D # 1.1 FULLWIDTH HYPHEN-MINUS +FF0E ; mapped ; 002E # 1.1 FULLWIDTH FULL STOP +FF0F ; disallowed_STD3_mapped ; 002F # 1.1 FULLWIDTH SOLIDUS +FF10 ; mapped ; 0030 # 1.1 FULLWIDTH DIGIT ZERO +FF11 ; mapped ; 0031 # 1.1 FULLWIDTH DIGIT ONE +FF12 ; mapped ; 0032 # 1.1 FULLWIDTH DIGIT TWO +FF13 ; mapped ; 0033 # 1.1 FULLWIDTH DIGIT THREE +FF14 ; mapped ; 0034 # 1.1 FULLWIDTH DIGIT FOUR +FF15 ; mapped ; 0035 # 1.1 FULLWIDTH DIGIT FIVE +FF16 ; mapped ; 0036 # 1.1 FULLWIDTH DIGIT SIX +FF17 ; mapped ; 0037 # 1.1 FULLWIDTH DIGIT SEVEN +FF18 ; mapped ; 0038 # 1.1 FULLWIDTH DIGIT EIGHT +FF19 ; mapped ; 0039 # 1.1 FULLWIDTH DIGIT NINE +FF1A ; disallowed_STD3_mapped ; 003A # 1.1 FULLWIDTH COLON +FF1B ; disallowed_STD3_mapped ; 003B # 1.1 FULLWIDTH SEMICOLON +FF1C ; disallowed_STD3_mapped ; 003C # 1.1 FULLWIDTH LESS-THAN SIGN +FF1D ; disallowed_STD3_mapped ; 003D # 1.1 FULLWIDTH EQUALS SIGN +FF1E ; disallowed_STD3_mapped ; 003E # 1.1 FULLWIDTH GREATER-THAN SIGN +FF1F ; disallowed_STD3_mapped ; 003F # 1.1 FULLWIDTH QUESTION MARK +FF20 ; disallowed_STD3_mapped ; 0040 # 1.1 FULLWIDTH COMMERCIAL AT +FF21 ; mapped ; 0061 # 1.1 FULLWIDTH LATIN CAPITAL LETTER A +FF22 ; mapped ; 0062 # 1.1 FULLWIDTH LATIN CAPITAL LETTER B +FF23 ; mapped ; 0063 # 1.1 FULLWIDTH LATIN CAPITAL LETTER C +FF24 ; mapped ; 0064 # 1.1 FULLWIDTH LATIN CAPITAL LETTER D +FF25 ; mapped ; 0065 # 1.1 FULLWIDTH LATIN CAPITAL LETTER E +FF26 ; mapped ; 0066 # 1.1 FULLWIDTH LATIN CAPITAL LETTER F +FF27 ; mapped ; 0067 # 1.1 FULLWIDTH LATIN CAPITAL LETTER G +FF28 ; mapped ; 0068 # 1.1 FULLWIDTH LATIN CAPITAL LETTER H +FF29 ; mapped ; 0069 # 1.1 FULLWIDTH LATIN CAPITAL LETTER I +FF2A ; mapped ; 006A # 1.1 FULLWIDTH LATIN CAPITAL LETTER J +FF2B ; mapped ; 006B # 1.1 FULLWIDTH LATIN CAPITAL LETTER K +FF2C ; mapped ; 006C # 1.1 FULLWIDTH LATIN CAPITAL LETTER L +FF2D ; mapped ; 006D # 1.1 FULLWIDTH LATIN CAPITAL LETTER M +FF2E ; mapped ; 006E # 1.1 FULLWIDTH LATIN CAPITAL LETTER N +FF2F ; mapped ; 006F # 1.1 FULLWIDTH LATIN CAPITAL LETTER O +FF30 ; mapped ; 0070 # 1.1 FULLWIDTH LATIN CAPITAL LETTER P +FF31 ; mapped ; 0071 # 1.1 FULLWIDTH LATIN CAPITAL LETTER Q +FF32 ; mapped ; 0072 # 1.1 FULLWIDTH LATIN CAPITAL LETTER R +FF33 ; mapped ; 0073 # 1.1 FULLWIDTH LATIN CAPITAL LETTER S +FF34 ; mapped ; 0074 # 1.1 FULLWIDTH LATIN CAPITAL LETTER T +FF35 ; mapped ; 0075 # 1.1 FULLWIDTH LATIN CAPITAL LETTER U +FF36 ; mapped ; 0076 # 1.1 FULLWIDTH LATIN CAPITAL LETTER V +FF37 ; mapped ; 0077 # 1.1 FULLWIDTH LATIN CAPITAL LETTER W +FF38 ; mapped ; 0078 # 1.1 FULLWIDTH LATIN CAPITAL LETTER X +FF39 ; mapped ; 0079 # 1.1 FULLWIDTH LATIN CAPITAL LETTER Y +FF3A ; mapped ; 007A # 1.1 FULLWIDTH 
LATIN CAPITAL LETTER Z +FF3B ; disallowed_STD3_mapped ; 005B # 1.1 FULLWIDTH LEFT SQUARE BRACKET +FF3C ; disallowed_STD3_mapped ; 005C # 1.1 FULLWIDTH REVERSE SOLIDUS +FF3D ; disallowed_STD3_mapped ; 005D # 1.1 FULLWIDTH RIGHT SQUARE BRACKET +FF3E ; disallowed_STD3_mapped ; 005E # 1.1 FULLWIDTH CIRCUMFLEX ACCENT +FF3F ; disallowed_STD3_mapped ; 005F # 1.1 FULLWIDTH LOW LINE +FF40 ; disallowed_STD3_mapped ; 0060 # 1.1 FULLWIDTH GRAVE ACCENT +FF41 ; mapped ; 0061 # 1.1 FULLWIDTH LATIN SMALL LETTER A +FF42 ; mapped ; 0062 # 1.1 FULLWIDTH LATIN SMALL LETTER B +FF43 ; mapped ; 0063 # 1.1 FULLWIDTH LATIN SMALL LETTER C +FF44 ; mapped ; 0064 # 1.1 FULLWIDTH LATIN SMALL LETTER D +FF45 ; mapped ; 0065 # 1.1 FULLWIDTH LATIN SMALL LETTER E +FF46 ; mapped ; 0066 # 1.1 FULLWIDTH LATIN SMALL LETTER F +FF47 ; mapped ; 0067 # 1.1 FULLWIDTH LATIN SMALL LETTER G +FF48 ; mapped ; 0068 # 1.1 FULLWIDTH LATIN SMALL LETTER H +FF49 ; mapped ; 0069 # 1.1 FULLWIDTH LATIN SMALL LETTER I +FF4A ; mapped ; 006A # 1.1 FULLWIDTH LATIN SMALL LETTER J +FF4B ; mapped ; 006B # 1.1 FULLWIDTH LATIN SMALL LETTER K +FF4C ; mapped ; 006C # 1.1 FULLWIDTH LATIN SMALL LETTER L +FF4D ; mapped ; 006D # 1.1 FULLWIDTH LATIN SMALL LETTER M +FF4E ; mapped ; 006E # 1.1 FULLWIDTH LATIN SMALL LETTER N +FF4F ; mapped ; 006F # 1.1 FULLWIDTH LATIN SMALL LETTER O +FF50 ; mapped ; 0070 # 1.1 FULLWIDTH LATIN SMALL LETTER P +FF51 ; mapped ; 0071 # 1.1 FULLWIDTH LATIN SMALL LETTER Q +FF52 ; mapped ; 0072 # 1.1 FULLWIDTH LATIN SMALL LETTER R +FF53 ; mapped ; 0073 # 1.1 FULLWIDTH LATIN SMALL LETTER S +FF54 ; mapped ; 0074 # 1.1 FULLWIDTH LATIN SMALL LETTER T +FF55 ; mapped ; 0075 # 1.1 FULLWIDTH LATIN SMALL LETTER U +FF56 ; mapped ; 0076 # 1.1 FULLWIDTH LATIN SMALL LETTER V +FF57 ; mapped ; 0077 # 1.1 FULLWIDTH LATIN SMALL LETTER W +FF58 ; mapped ; 0078 # 1.1 FULLWIDTH LATIN SMALL LETTER X +FF59 ; mapped ; 0079 # 1.1 FULLWIDTH LATIN SMALL LETTER Y +FF5A ; mapped ; 007A # 1.1 FULLWIDTH LATIN SMALL LETTER Z +FF5B ; disallowed_STD3_mapped ; 007B # 1.1 FULLWIDTH LEFT CURLY BRACKET +FF5C ; disallowed_STD3_mapped ; 007C # 1.1 FULLWIDTH VERTICAL LINE +FF5D ; disallowed_STD3_mapped ; 007D # 1.1 FULLWIDTH RIGHT CURLY BRACKET +FF5E ; disallowed_STD3_mapped ; 007E # 1.1 FULLWIDTH TILDE +FF5F ; mapped ; 2985 # 3.2 FULLWIDTH LEFT WHITE PARENTHESIS +FF60 ; mapped ; 2986 # 3.2 FULLWIDTH RIGHT WHITE PARENTHESIS +FF61 ; mapped ; 002E # 1.1 HALFWIDTH IDEOGRAPHIC FULL STOP +FF62 ; mapped ; 300C # 1.1 HALFWIDTH LEFT CORNER BRACKET +FF63 ; mapped ; 300D # 1.1 HALFWIDTH RIGHT CORNER BRACKET +FF64 ; mapped ; 3001 # 1.1 HALFWIDTH IDEOGRAPHIC COMMA +FF65 ; mapped ; 30FB # 1.1 HALFWIDTH KATAKANA MIDDLE DOT +FF66 ; mapped ; 30F2 # 1.1 HALFWIDTH KATAKANA LETTER WO +FF67 ; mapped ; 30A1 # 1.1 HALFWIDTH KATAKANA LETTER SMALL A +FF68 ; mapped ; 30A3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL I +FF69 ; mapped ; 30A5 # 1.1 HALFWIDTH KATAKANA LETTER SMALL U +FF6A ; mapped ; 30A7 # 1.1 HALFWIDTH KATAKANA LETTER SMALL E +FF6B ; mapped ; 30A9 # 1.1 HALFWIDTH KATAKANA LETTER SMALL O +FF6C ; mapped ; 30E3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YA +FF6D ; mapped ; 30E5 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YU +FF6E ; mapped ; 30E7 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YO +FF6F ; mapped ; 30C3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL TU +FF70 ; mapped ; 30FC # 1.1 HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK +FF71 ; mapped ; 30A2 # 1.1 HALFWIDTH KATAKANA LETTER A +FF72 ; mapped ; 30A4 # 1.1 HALFWIDTH KATAKANA LETTER I +FF73 ; mapped ; 30A6 # 1.1 HALFWIDTH KATAKANA LETTER U +FF74 ; mapped ; 30A8 # 
1.1 HALFWIDTH KATAKANA LETTER E +FF75 ; mapped ; 30AA # 1.1 HALFWIDTH KATAKANA LETTER O +FF76 ; mapped ; 30AB # 1.1 HALFWIDTH KATAKANA LETTER KA +FF77 ; mapped ; 30AD # 1.1 HALFWIDTH KATAKANA LETTER KI +FF78 ; mapped ; 30AF # 1.1 HALFWIDTH KATAKANA LETTER KU +FF79 ; mapped ; 30B1 # 1.1 HALFWIDTH KATAKANA LETTER KE +FF7A ; mapped ; 30B3 # 1.1 HALFWIDTH KATAKANA LETTER KO +FF7B ; mapped ; 30B5 # 1.1 HALFWIDTH KATAKANA LETTER SA +FF7C ; mapped ; 30B7 # 1.1 HALFWIDTH KATAKANA LETTER SI +FF7D ; mapped ; 30B9 # 1.1 HALFWIDTH KATAKANA LETTER SU +FF7E ; mapped ; 30BB # 1.1 HALFWIDTH KATAKANA LETTER SE +FF7F ; mapped ; 30BD # 1.1 HALFWIDTH KATAKANA LETTER SO +FF80 ; mapped ; 30BF # 1.1 HALFWIDTH KATAKANA LETTER TA +FF81 ; mapped ; 30C1 # 1.1 HALFWIDTH KATAKANA LETTER TI +FF82 ; mapped ; 30C4 # 1.1 HALFWIDTH KATAKANA LETTER TU +FF83 ; mapped ; 30C6 # 1.1 HALFWIDTH KATAKANA LETTER TE +FF84 ; mapped ; 30C8 # 1.1 HALFWIDTH KATAKANA LETTER TO +FF85 ; mapped ; 30CA # 1.1 HALFWIDTH KATAKANA LETTER NA +FF86 ; mapped ; 30CB # 1.1 HALFWIDTH KATAKANA LETTER NI +FF87 ; mapped ; 30CC # 1.1 HALFWIDTH KATAKANA LETTER NU +FF88 ; mapped ; 30CD # 1.1 HALFWIDTH KATAKANA LETTER NE +FF89 ; mapped ; 30CE # 1.1 HALFWIDTH KATAKANA LETTER NO +FF8A ; mapped ; 30CF # 1.1 HALFWIDTH KATAKANA LETTER HA +FF8B ; mapped ; 30D2 # 1.1 HALFWIDTH KATAKANA LETTER HI +FF8C ; mapped ; 30D5 # 1.1 HALFWIDTH KATAKANA LETTER HU +FF8D ; mapped ; 30D8 # 1.1 HALFWIDTH KATAKANA LETTER HE +FF8E ; mapped ; 30DB # 1.1 HALFWIDTH KATAKANA LETTER HO +FF8F ; mapped ; 30DE # 1.1 HALFWIDTH KATAKANA LETTER MA +FF90 ; mapped ; 30DF # 1.1 HALFWIDTH KATAKANA LETTER MI +FF91 ; mapped ; 30E0 # 1.1 HALFWIDTH KATAKANA LETTER MU +FF92 ; mapped ; 30E1 # 1.1 HALFWIDTH KATAKANA LETTER ME +FF93 ; mapped ; 30E2 # 1.1 HALFWIDTH KATAKANA LETTER MO +FF94 ; mapped ; 30E4 # 1.1 HALFWIDTH KATAKANA LETTER YA +FF95 ; mapped ; 30E6 # 1.1 HALFWIDTH KATAKANA LETTER YU +FF96 ; mapped ; 30E8 # 1.1 HALFWIDTH KATAKANA LETTER YO +FF97 ; mapped ; 30E9 # 1.1 HALFWIDTH KATAKANA LETTER RA +FF98 ; mapped ; 30EA # 1.1 HALFWIDTH KATAKANA LETTER RI +FF99 ; mapped ; 30EB # 1.1 HALFWIDTH KATAKANA LETTER RU +FF9A ; mapped ; 30EC # 1.1 HALFWIDTH KATAKANA LETTER RE +FF9B ; mapped ; 30ED # 1.1 HALFWIDTH KATAKANA LETTER RO +FF9C ; mapped ; 30EF # 1.1 HALFWIDTH KATAKANA LETTER WA +FF9D ; mapped ; 30F3 # 1.1 HALFWIDTH KATAKANA LETTER N +FF9E ; mapped ; 3099 # 1.1 HALFWIDTH KATAKANA VOICED SOUND MARK +FF9F ; mapped ; 309A # 1.1 HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK +FFA0 ; disallowed # 1.1 HALFWIDTH HANGUL FILLER +FFA1 ; mapped ; 1100 # 1.1 HALFWIDTH HANGUL LETTER KIYEOK +FFA2 ; mapped ; 1101 # 1.1 HALFWIDTH HANGUL LETTER SSANGKIYEOK +FFA3 ; mapped ; 11AA # 1.1 HALFWIDTH HANGUL LETTER KIYEOK-SIOS +FFA4 ; mapped ; 1102 # 1.1 HALFWIDTH HANGUL LETTER NIEUN +FFA5 ; mapped ; 11AC # 1.1 HALFWIDTH HANGUL LETTER NIEUN-CIEUC +FFA6 ; mapped ; 11AD # 1.1 HALFWIDTH HANGUL LETTER NIEUN-HIEUH +FFA7 ; mapped ; 1103 # 1.1 HALFWIDTH HANGUL LETTER TIKEUT +FFA8 ; mapped ; 1104 # 1.1 HALFWIDTH HANGUL LETTER SSANGTIKEUT +FFA9 ; mapped ; 1105 # 1.1 HALFWIDTH HANGUL LETTER RIEUL +FFAA ; mapped ; 11B0 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-KIYEOK +FFAB ; mapped ; 11B1 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-MIEUM +FFAC ; mapped ; 11B2 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-PIEUP +FFAD ; mapped ; 11B3 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-SIOS +FFAE ; mapped ; 11B4 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-THIEUTH +FFAF ; mapped ; 11B5 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-PHIEUPH +FFB0 ; mapped ; 111A # 1.1 HALFWIDTH HANGUL LETTER 
RIEUL-HIEUH +FFB1 ; mapped ; 1106 # 1.1 HALFWIDTH HANGUL LETTER MIEUM +FFB2 ; mapped ; 1107 # 1.1 HALFWIDTH HANGUL LETTER PIEUP +FFB3 ; mapped ; 1108 # 1.1 HALFWIDTH HANGUL LETTER SSANGPIEUP +FFB4 ; mapped ; 1121 # 1.1 HALFWIDTH HANGUL LETTER PIEUP-SIOS +FFB5 ; mapped ; 1109 # 1.1 HALFWIDTH HANGUL LETTER SIOS +FFB6 ; mapped ; 110A # 1.1 HALFWIDTH HANGUL LETTER SSANGSIOS +FFB7 ; mapped ; 110B # 1.1 HALFWIDTH HANGUL LETTER IEUNG +FFB8 ; mapped ; 110C # 1.1 HALFWIDTH HANGUL LETTER CIEUC +FFB9 ; mapped ; 110D # 1.1 HALFWIDTH HANGUL LETTER SSANGCIEUC +FFBA ; mapped ; 110E # 1.1 HALFWIDTH HANGUL LETTER CHIEUCH +FFBB ; mapped ; 110F # 1.1 HALFWIDTH HANGUL LETTER KHIEUKH +FFBC ; mapped ; 1110 # 1.1 HALFWIDTH HANGUL LETTER THIEUTH +FFBD ; mapped ; 1111 # 1.1 HALFWIDTH HANGUL LETTER PHIEUPH +FFBE ; mapped ; 1112 # 1.1 HALFWIDTH HANGUL LETTER HIEUH +FFBF..FFC1 ; disallowed # NA .. +FFC2 ; mapped ; 1161 # 1.1 HALFWIDTH HANGUL LETTER A +FFC3 ; mapped ; 1162 # 1.1 HALFWIDTH HANGUL LETTER AE +FFC4 ; mapped ; 1163 # 1.1 HALFWIDTH HANGUL LETTER YA +FFC5 ; mapped ; 1164 # 1.1 HALFWIDTH HANGUL LETTER YAE +FFC6 ; mapped ; 1165 # 1.1 HALFWIDTH HANGUL LETTER EO +FFC7 ; mapped ; 1166 # 1.1 HALFWIDTH HANGUL LETTER E +FFC8..FFC9 ; disallowed # NA .. +FFCA ; mapped ; 1167 # 1.1 HALFWIDTH HANGUL LETTER YEO +FFCB ; mapped ; 1168 # 1.1 HALFWIDTH HANGUL LETTER YE +FFCC ; mapped ; 1169 # 1.1 HALFWIDTH HANGUL LETTER O +FFCD ; mapped ; 116A # 1.1 HALFWIDTH HANGUL LETTER WA +FFCE ; mapped ; 116B # 1.1 HALFWIDTH HANGUL LETTER WAE +FFCF ; mapped ; 116C # 1.1 HALFWIDTH HANGUL LETTER OE +FFD0..FFD1 ; disallowed # NA .. +FFD2 ; mapped ; 116D # 1.1 HALFWIDTH HANGUL LETTER YO +FFD3 ; mapped ; 116E # 1.1 HALFWIDTH HANGUL LETTER U +FFD4 ; mapped ; 116F # 1.1 HALFWIDTH HANGUL LETTER WEO +FFD5 ; mapped ; 1170 # 1.1 HALFWIDTH HANGUL LETTER WE +FFD6 ; mapped ; 1171 # 1.1 HALFWIDTH HANGUL LETTER WI +FFD7 ; mapped ; 1172 # 1.1 HALFWIDTH HANGUL LETTER YU +FFD8..FFD9 ; disallowed # NA .. +FFDA ; mapped ; 1173 # 1.1 HALFWIDTH HANGUL LETTER EU +FFDB ; mapped ; 1174 # 1.1 HALFWIDTH HANGUL LETTER YI +FFDC ; mapped ; 1175 # 1.1 HALFWIDTH HANGUL LETTER I +FFDD..FFDF ; disallowed # NA .. +FFE0 ; mapped ; 00A2 # 1.1 FULLWIDTH CENT SIGN +FFE1 ; mapped ; 00A3 # 1.1 FULLWIDTH POUND SIGN +FFE2 ; mapped ; 00AC # 1.1 FULLWIDTH NOT SIGN +FFE3 ; disallowed_STD3_mapped ; 0020 0304 # 1.1 FULLWIDTH MACRON +FFE4 ; mapped ; 00A6 # 1.1 FULLWIDTH BROKEN BAR +FFE5 ; mapped ; 00A5 # 1.1 FULLWIDTH YEN SIGN +FFE6 ; mapped ; 20A9 # 1.1 FULLWIDTH WON SIGN +FFE7 ; disallowed # NA +FFE8 ; mapped ; 2502 # 1.1 HALFWIDTH FORMS LIGHT VERTICAL +FFE9 ; mapped ; 2190 # 1.1 HALFWIDTH LEFTWARDS ARROW +FFEA ; mapped ; 2191 # 1.1 HALFWIDTH UPWARDS ARROW +FFEB ; mapped ; 2192 # 1.1 HALFWIDTH RIGHTWARDS ARROW +FFEC ; mapped ; 2193 # 1.1 HALFWIDTH DOWNWARDS ARROW +FFED ; mapped ; 25A0 # 1.1 HALFWIDTH BLACK SQUARE +FFEE ; mapped ; 25CB # 1.1 HALFWIDTH WHITE CIRCLE +FFEF..FFF8 ; disallowed # NA .. +FFF9..FFFB ; disallowed # 3.0 INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR +FFFC ; disallowed # 2.1 OBJECT REPLACEMENT CHARACTER +FFFD ; disallowed # 1.1 REPLACEMENT CHARACTER +FFFE..FFFF ; disallowed # 1.1 .. 
+10000..1000B ; valid # 4.0 LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE +1000C ; disallowed # NA +1000D..10026 ; valid # 4.0 LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO +10027 ; disallowed # NA +10028..1003A ; valid # 4.0 LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO +1003B ; disallowed # NA +1003C..1003D ; valid # 4.0 LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE +1003E ; disallowed # NA +1003F..1004D ; valid # 4.0 LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO +1004E..1004F ; disallowed # NA .. +10050..1005D ; valid # 4.0 LINEAR B SYMBOL B018..LINEAR B SYMBOL B089 +1005E..1007F ; disallowed # NA .. +10080..100FA ; valid # 4.0 LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305 +100FB..100FF ; disallowed # NA .. +10100..10102 ; valid ; ; NV8 # 4.0 AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK +10103..10106 ; disallowed # NA .. +10107..10133 ; valid ; ; NV8 # 4.0 AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND +10134..10136 ; disallowed # NA .. +10137..1013F ; valid ; ; NV8 # 4.0 AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT +10140..1018A ; valid ; ; NV8 # 4.1 GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ZERO SIGN +1018B..1018C ; valid ; ; NV8 # 7.0 GREEK ONE QUARTER SIGN..GREEK SINUSOID SIGN +1018D..1018E ; valid ; ; NV8 # 9.0 GREEK INDICTION SIGN..NOMISMA SIGN +1018F ; disallowed # NA +10190..1019B ; valid ; ; NV8 # 5.1 ROMAN SEXTANS SIGN..ROMAN CENTURIAL SIGN +1019C..1019F ; disallowed # NA .. +101A0 ; valid ; ; NV8 # 7.0 GREEK SYMBOL TAU RHO +101A1..101CF ; disallowed # NA .. +101D0..101FC ; valid ; ; NV8 # 5.1 PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND +101FD ; valid # 5.1 PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE +101FE..1027F ; disallowed # NA .. +10280..1029C ; valid # 5.1 LYCIAN LETTER A..LYCIAN LETTER X +1029D..1029F ; disallowed # NA .. +102A0..102D0 ; valid # 5.1 CARIAN LETTER A..CARIAN LETTER UUU3 +102D1..102DF ; disallowed # NA .. +102E0 ; valid # 7.0 COPTIC EPACT THOUSANDS MARK +102E1..102FB ; valid ; ; NV8 # 7.0 COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED +102FC..102FF ; disallowed # NA .. +10300..1031E ; valid # 3.1 OLD ITALIC LETTER A..OLD ITALIC LETTER UU +1031F ; valid # 7.0 OLD ITALIC LETTER ESS +10320..10323 ; valid ; ; NV8 # 3.1 OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY +10324..1032F ; disallowed # NA .. +10330..10340 ; valid # 3.1 GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA +10341 ; valid ; ; NV8 # 3.1 GOTHIC LETTER NINETY +10342..10349 ; valid # 3.1 GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL +1034A ; valid ; ; NV8 # 3.1 GOTHIC LETTER NINE HUNDRED +1034B..1034F ; disallowed # NA .. +10350..1037A ; valid # 7.0 OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII +1037B..1037F ; disallowed # NA .. +10380..1039D ; valid # 4.0 UGARITIC LETTER ALPA..UGARITIC LETTER SSU +1039E ; disallowed # NA +1039F ; valid ; ; NV8 # 4.0 UGARITIC WORD DIVIDER +103A0..103C3 ; valid # 4.1 OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA +103C4..103C7 ; disallowed # NA .. +103C8..103CF ; valid # 4.1 OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH +103D0..103D5 ; valid ; ; NV8 # 4.1 OLD PERSIAN WORD DIVIDER..OLD PERSIAN NUMBER HUNDRED +103D6..103FF ; disallowed # NA .. 
+10400 ; mapped ; 10428 # 3.1 DESERET CAPITAL LETTER LONG I +10401 ; mapped ; 10429 # 3.1 DESERET CAPITAL LETTER LONG E +10402 ; mapped ; 1042A # 3.1 DESERET CAPITAL LETTER LONG A +10403 ; mapped ; 1042B # 3.1 DESERET CAPITAL LETTER LONG AH +10404 ; mapped ; 1042C # 3.1 DESERET CAPITAL LETTER LONG O +10405 ; mapped ; 1042D # 3.1 DESERET CAPITAL LETTER LONG OO +10406 ; mapped ; 1042E # 3.1 DESERET CAPITAL LETTER SHORT I +10407 ; mapped ; 1042F # 3.1 DESERET CAPITAL LETTER SHORT E +10408 ; mapped ; 10430 # 3.1 DESERET CAPITAL LETTER SHORT A +10409 ; mapped ; 10431 # 3.1 DESERET CAPITAL LETTER SHORT AH +1040A ; mapped ; 10432 # 3.1 DESERET CAPITAL LETTER SHORT O +1040B ; mapped ; 10433 # 3.1 DESERET CAPITAL LETTER SHORT OO +1040C ; mapped ; 10434 # 3.1 DESERET CAPITAL LETTER AY +1040D ; mapped ; 10435 # 3.1 DESERET CAPITAL LETTER OW +1040E ; mapped ; 10436 # 3.1 DESERET CAPITAL LETTER WU +1040F ; mapped ; 10437 # 3.1 DESERET CAPITAL LETTER YEE +10410 ; mapped ; 10438 # 3.1 DESERET CAPITAL LETTER H +10411 ; mapped ; 10439 # 3.1 DESERET CAPITAL LETTER PEE +10412 ; mapped ; 1043A # 3.1 DESERET CAPITAL LETTER BEE +10413 ; mapped ; 1043B # 3.1 DESERET CAPITAL LETTER TEE +10414 ; mapped ; 1043C # 3.1 DESERET CAPITAL LETTER DEE +10415 ; mapped ; 1043D # 3.1 DESERET CAPITAL LETTER CHEE +10416 ; mapped ; 1043E # 3.1 DESERET CAPITAL LETTER JEE +10417 ; mapped ; 1043F # 3.1 DESERET CAPITAL LETTER KAY +10418 ; mapped ; 10440 # 3.1 DESERET CAPITAL LETTER GAY +10419 ; mapped ; 10441 # 3.1 DESERET CAPITAL LETTER EF +1041A ; mapped ; 10442 # 3.1 DESERET CAPITAL LETTER VEE +1041B ; mapped ; 10443 # 3.1 DESERET CAPITAL LETTER ETH +1041C ; mapped ; 10444 # 3.1 DESERET CAPITAL LETTER THEE +1041D ; mapped ; 10445 # 3.1 DESERET CAPITAL LETTER ES +1041E ; mapped ; 10446 # 3.1 DESERET CAPITAL LETTER ZEE +1041F ; mapped ; 10447 # 3.1 DESERET CAPITAL LETTER ESH +10420 ; mapped ; 10448 # 3.1 DESERET CAPITAL LETTER ZHEE +10421 ; mapped ; 10449 # 3.1 DESERET CAPITAL LETTER ER +10422 ; mapped ; 1044A # 3.1 DESERET CAPITAL LETTER EL +10423 ; mapped ; 1044B # 3.1 DESERET CAPITAL LETTER EM +10424 ; mapped ; 1044C # 3.1 DESERET CAPITAL LETTER EN +10425 ; mapped ; 1044D # 3.1 DESERET CAPITAL LETTER ENG +10426 ; mapped ; 1044E # 4.0 DESERET CAPITAL LETTER OI +10427 ; mapped ; 1044F # 4.0 DESERET CAPITAL LETTER EW +10428..1044D ; valid # 3.1 DESERET SMALL LETTER LONG I..DESERET SMALL LETTER ENG +1044E..1049D ; valid # 4.0 DESERET SMALL LETTER OI..OSMANYA LETTER OO +1049E..1049F ; disallowed # NA .. +104A0..104A9 ; valid # 4.0 OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE +104AA..104AF ; disallowed # NA .. 
+104B0 ; mapped ; 104D8 # 9.0 OSAGE CAPITAL LETTER A +104B1 ; mapped ; 104D9 # 9.0 OSAGE CAPITAL LETTER AI +104B2 ; mapped ; 104DA # 9.0 OSAGE CAPITAL LETTER AIN +104B3 ; mapped ; 104DB # 9.0 OSAGE CAPITAL LETTER AH +104B4 ; mapped ; 104DC # 9.0 OSAGE CAPITAL LETTER BRA +104B5 ; mapped ; 104DD # 9.0 OSAGE CAPITAL LETTER CHA +104B6 ; mapped ; 104DE # 9.0 OSAGE CAPITAL LETTER EHCHA +104B7 ; mapped ; 104DF # 9.0 OSAGE CAPITAL LETTER E +104B8 ; mapped ; 104E0 # 9.0 OSAGE CAPITAL LETTER EIN +104B9 ; mapped ; 104E1 # 9.0 OSAGE CAPITAL LETTER HA +104BA ; mapped ; 104E2 # 9.0 OSAGE CAPITAL LETTER HYA +104BB ; mapped ; 104E3 # 9.0 OSAGE CAPITAL LETTER I +104BC ; mapped ; 104E4 # 9.0 OSAGE CAPITAL LETTER KA +104BD ; mapped ; 104E5 # 9.0 OSAGE CAPITAL LETTER EHKA +104BE ; mapped ; 104E6 # 9.0 OSAGE CAPITAL LETTER KYA +104BF ; mapped ; 104E7 # 9.0 OSAGE CAPITAL LETTER LA +104C0 ; mapped ; 104E8 # 9.0 OSAGE CAPITAL LETTER MA +104C1 ; mapped ; 104E9 # 9.0 OSAGE CAPITAL LETTER NA +104C2 ; mapped ; 104EA # 9.0 OSAGE CAPITAL LETTER O +104C3 ; mapped ; 104EB # 9.0 OSAGE CAPITAL LETTER OIN +104C4 ; mapped ; 104EC # 9.0 OSAGE CAPITAL LETTER PA +104C5 ; mapped ; 104ED # 9.0 OSAGE CAPITAL LETTER EHPA +104C6 ; mapped ; 104EE # 9.0 OSAGE CAPITAL LETTER SA +104C7 ; mapped ; 104EF # 9.0 OSAGE CAPITAL LETTER SHA +104C8 ; mapped ; 104F0 # 9.0 OSAGE CAPITAL LETTER TA +104C9 ; mapped ; 104F1 # 9.0 OSAGE CAPITAL LETTER EHTA +104CA ; mapped ; 104F2 # 9.0 OSAGE CAPITAL LETTER TSA +104CB ; mapped ; 104F3 # 9.0 OSAGE CAPITAL LETTER EHTSA +104CC ; mapped ; 104F4 # 9.0 OSAGE CAPITAL LETTER TSHA +104CD ; mapped ; 104F5 # 9.0 OSAGE CAPITAL LETTER DHA +104CE ; mapped ; 104F6 # 9.0 OSAGE CAPITAL LETTER U +104CF ; mapped ; 104F7 # 9.0 OSAGE CAPITAL LETTER WA +104D0 ; mapped ; 104F8 # 9.0 OSAGE CAPITAL LETTER KHA +104D1 ; mapped ; 104F9 # 9.0 OSAGE CAPITAL LETTER GHA +104D2 ; mapped ; 104FA # 9.0 OSAGE CAPITAL LETTER ZA +104D3 ; mapped ; 104FB # 9.0 OSAGE CAPITAL LETTER ZHA +104D4..104D7 ; disallowed # NA .. +104D8..104FB ; valid # 9.0 OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA +104FC..104FF ; disallowed # NA .. +10500..10527 ; valid # 7.0 ELBASAN LETTER A..ELBASAN LETTER KHE +10528..1052F ; disallowed # NA .. +10530..10563 ; valid # 7.0 CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW +10564..1056E ; disallowed # NA .. +1056F ; valid ; ; NV8 # 7.0 CAUCASIAN ALBANIAN CITATION MARK +10570..105FF ; disallowed # NA .. +10600..10736 ; valid # 7.0 LINEAR A SIGN AB001..LINEAR A SIGN A664 +10737..1073F ; disallowed # NA .. +10740..10755 ; valid # 7.0 LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE +10756..1075F ; disallowed # NA .. +10760..10767 ; valid # 7.0 LINEAR A SIGN A800..LINEAR A SIGN A807 +10768..107FF ; disallowed # NA .. +10800..10805 ; valid # 4.0 CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA +10806..10807 ; disallowed # NA .. +10808 ; valid # 4.0 CYPRIOT SYLLABLE JO +10809 ; disallowed # NA +1080A..10835 ; valid # 4.0 CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO +10836 ; disallowed # NA +10837..10838 ; valid # 4.0 CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE +10839..1083B ; disallowed # NA .. +1083C ; valid # 4.0 CYPRIOT SYLLABLE ZA +1083D..1083E ; disallowed # NA .. 
+1083F ; valid # 4.0 CYPRIOT SYLLABLE ZO +10840..10855 ; valid # 5.2 IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW +10856 ; disallowed # NA +10857..1085F ; valid ; ; NV8 # 5.2 IMPERIAL ARAMAIC SECTION SIGN..IMPERIAL ARAMAIC NUMBER TEN THOUSAND +10860..10876 ; valid # 7.0 PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW +10877..1087F ; valid ; ; NV8 # 7.0 PALMYRENE LEFT-POINTING FLEURON..PALMYRENE NUMBER TWENTY +10880..1089E ; valid # 7.0 NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW +1089F..108A6 ; disallowed # NA .. +108A7..108AF ; valid ; ; NV8 # 7.0 NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED +108B0..108DF ; disallowed # NA .. +108E0..108F2 ; valid # 8.0 HATRAN LETTER ALEPH..HATRAN LETTER QOPH +108F3 ; disallowed # NA +108F4..108F5 ; valid # 8.0 HATRAN LETTER SHIN..HATRAN LETTER TAW +108F6..108FA ; disallowed # NA .. +108FB..108FF ; valid ; ; NV8 # 8.0 HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED +10900..10915 ; valid # 5.0 PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU +10916..10919 ; valid ; ; NV8 # 5.0 PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER ONE HUNDRED +1091A..1091B ; valid ; ; NV8 # 5.2 PHOENICIAN NUMBER TWO..PHOENICIAN NUMBER THREE +1091C..1091E ; disallowed # NA .. +1091F ; valid ; ; NV8 # 5.0 PHOENICIAN WORD SEPARATOR +10920..10939 ; valid # 5.1 LYDIAN LETTER A..LYDIAN LETTER C +1093A..1093E ; disallowed # NA .. +1093F ; valid ; ; NV8 # 5.1 LYDIAN TRIANGULAR MARK +10940..1097F ; disallowed # NA .. +10980..109B7 ; valid # 6.1 MEROITIC HIEROGLYPHIC LETTER A..MEROITIC CURSIVE LETTER DA +109B8..109BB ; disallowed # NA .. +109BC..109BD ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF +109BE..109BF ; valid # 6.1 MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN +109C0..109CF ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY +109D0..109D1 ; disallowed # NA .. +109D2..109FF ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS +10A00..10A03 ; valid # 4.1 KHAROSHTHI LETTER A..KHAROSHTHI VOWEL SIGN VOCALIC R +10A04 ; disallowed # NA +10A05..10A06 ; valid # 4.1 KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O +10A07..10A0B ; disallowed # NA .. +10A0C..10A13 ; valid # 4.1 KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI LETTER GHA +10A14 ; disallowed # NA +10A15..10A17 ; valid # 4.1 KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA +10A18 ; disallowed # NA +10A19..10A33 ; valid # 4.1 KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER TTTHA +10A34..10A37 ; disallowed # NA .. +10A38..10A3A ; valid # 4.1 KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW +10A3B..10A3E ; disallowed # NA .. +10A3F ; valid # 4.1 KHAROSHTHI VIRAMA +10A40..10A47 ; valid ; ; NV8 # 4.1 KHAROSHTHI DIGIT ONE..KHAROSHTHI NUMBER ONE THOUSAND +10A48..10A4F ; disallowed # NA .. +10A50..10A58 ; valid ; ; NV8 # 4.1 KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION LINES +10A59..10A5F ; disallowed # NA .. +10A60..10A7C ; valid # 5.2 OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH +10A7D..10A7F ; valid ; ; NV8 # 5.2 OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMERIC INDICATOR +10A80..10A9C ; valid # 7.0 OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH +10A9D..10A9F ; valid ; ; NV8 # 7.0 OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY +10AA0..10ABF ; disallowed # NA .. 
+10AC0..10AC7 ; valid # 7.0 MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW +10AC8 ; valid ; ; NV8 # 7.0 MANICHAEAN SIGN UD +10AC9..10AE6 ; valid # 7.0 MANICHAEAN LETTER ZAYIN..MANICHAEAN ABBREVIATION MARK BELOW +10AE7..10AEA ; disallowed # NA .. +10AEB..10AF6 ; valid ; ; NV8 # 7.0 MANICHAEAN NUMBER ONE..MANICHAEAN PUNCTUATION LINE FILLER +10AF7..10AFF ; disallowed # NA .. +10B00..10B35 ; valid # 5.2 AVESTAN LETTER A..AVESTAN LETTER HE +10B36..10B38 ; disallowed # NA .. +10B39..10B3F ; valid ; ; NV8 # 5.2 AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION +10B40..10B55 ; valid # 5.2 INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW +10B56..10B57 ; disallowed # NA .. +10B58..10B5F ; valid ; ; NV8 # 5.2 INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND +10B60..10B72 ; valid # 5.2 INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW +10B73..10B77 ; disallowed # NA .. +10B78..10B7F ; valid ; ; NV8 # 5.2 INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND +10B80..10B91 ; valid # 7.0 PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW +10B92..10B98 ; disallowed # NA .. +10B99..10B9C ; valid ; ; NV8 # 7.0 PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT +10B9D..10BA8 ; disallowed # NA .. +10BA9..10BAF ; valid ; ; NV8 # 7.0 PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED +10BB0..10BFF ; disallowed # NA .. +10C00..10C48 ; valid # 5.2 OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH +10C49..10C7F ; disallowed # NA .. +10C80 ; mapped ; 10CC0 # 8.0 OLD HUNGARIAN CAPITAL LETTER A +10C81 ; mapped ; 10CC1 # 8.0 OLD HUNGARIAN CAPITAL LETTER AA +10C82 ; mapped ; 10CC2 # 8.0 OLD HUNGARIAN CAPITAL LETTER EB +10C83 ; mapped ; 10CC3 # 8.0 OLD HUNGARIAN CAPITAL LETTER AMB +10C84 ; mapped ; 10CC4 # 8.0 OLD HUNGARIAN CAPITAL LETTER EC +10C85 ; mapped ; 10CC5 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENC +10C86 ; mapped ; 10CC6 # 8.0 OLD HUNGARIAN CAPITAL LETTER ECS +10C87 ; mapped ; 10CC7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ED +10C88 ; mapped ; 10CC8 # 8.0 OLD HUNGARIAN CAPITAL LETTER AND +10C89 ; mapped ; 10CC9 # 8.0 OLD HUNGARIAN CAPITAL LETTER E +10C8A ; mapped ; 10CCA # 8.0 OLD HUNGARIAN CAPITAL LETTER CLOSE E +10C8B ; mapped ; 10CCB # 8.0 OLD HUNGARIAN CAPITAL LETTER EE +10C8C ; mapped ; 10CCC # 8.0 OLD HUNGARIAN CAPITAL LETTER EF +10C8D ; mapped ; 10CCD # 8.0 OLD HUNGARIAN CAPITAL LETTER EG +10C8E ; mapped ; 10CCE # 8.0 OLD HUNGARIAN CAPITAL LETTER EGY +10C8F ; mapped ; 10CCF # 8.0 OLD HUNGARIAN CAPITAL LETTER EH +10C90 ; mapped ; 10CD0 # 8.0 OLD HUNGARIAN CAPITAL LETTER I +10C91 ; mapped ; 10CD1 # 8.0 OLD HUNGARIAN CAPITAL LETTER II +10C92 ; mapped ; 10CD2 # 8.0 OLD HUNGARIAN CAPITAL LETTER EJ +10C93 ; mapped ; 10CD3 # 8.0 OLD HUNGARIAN CAPITAL LETTER EK +10C94 ; mapped ; 10CD4 # 8.0 OLD HUNGARIAN CAPITAL LETTER AK +10C95 ; mapped ; 10CD5 # 8.0 OLD HUNGARIAN CAPITAL LETTER UNK +10C96 ; mapped ; 10CD6 # 8.0 OLD HUNGARIAN CAPITAL LETTER EL +10C97 ; mapped ; 10CD7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ELY +10C98 ; mapped ; 10CD8 # 8.0 OLD HUNGARIAN CAPITAL LETTER EM +10C99 ; mapped ; 10CD9 # 8.0 OLD HUNGARIAN CAPITAL LETTER EN +10C9A ; mapped ; 10CDA # 8.0 OLD HUNGARIAN CAPITAL LETTER ENY +10C9B ; mapped ; 10CDB # 8.0 OLD HUNGARIAN CAPITAL LETTER O +10C9C ; mapped ; 10CDC # 8.0 OLD HUNGARIAN CAPITAL LETTER OO +10C9D ; mapped ; 10CDD # 8.0 OLD HUNGARIAN CAPITAL LETTER NIKOLSBURG OE +10C9E ; mapped ; 10CDE # 8.0 OLD HUNGARIAN CAPITAL LETTER RUDIMENTA OE +10C9F ; mapped ; 
10CDF # 8.0 OLD HUNGARIAN CAPITAL LETTER OEE +10CA0 ; mapped ; 10CE0 # 8.0 OLD HUNGARIAN CAPITAL LETTER EP +10CA1 ; mapped ; 10CE1 # 8.0 OLD HUNGARIAN CAPITAL LETTER EMP +10CA2 ; mapped ; 10CE2 # 8.0 OLD HUNGARIAN CAPITAL LETTER ER +10CA3 ; mapped ; 10CE3 # 8.0 OLD HUNGARIAN CAPITAL LETTER SHORT ER +10CA4 ; mapped ; 10CE4 # 8.0 OLD HUNGARIAN CAPITAL LETTER ES +10CA5 ; mapped ; 10CE5 # 8.0 OLD HUNGARIAN CAPITAL LETTER ESZ +10CA6 ; mapped ; 10CE6 # 8.0 OLD HUNGARIAN CAPITAL LETTER ET +10CA7 ; mapped ; 10CE7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENT +10CA8 ; mapped ; 10CE8 # 8.0 OLD HUNGARIAN CAPITAL LETTER ETY +10CA9 ; mapped ; 10CE9 # 8.0 OLD HUNGARIAN CAPITAL LETTER ECH +10CAA ; mapped ; 10CEA # 8.0 OLD HUNGARIAN CAPITAL LETTER U +10CAB ; mapped ; 10CEB # 8.0 OLD HUNGARIAN CAPITAL LETTER UU +10CAC ; mapped ; 10CEC # 8.0 OLD HUNGARIAN CAPITAL LETTER NIKOLSBURG UE +10CAD ; mapped ; 10CED # 8.0 OLD HUNGARIAN CAPITAL LETTER RUDIMENTA UE +10CAE ; mapped ; 10CEE # 8.0 OLD HUNGARIAN CAPITAL LETTER EV +10CAF ; mapped ; 10CEF # 8.0 OLD HUNGARIAN CAPITAL LETTER EZ +10CB0 ; mapped ; 10CF0 # 8.0 OLD HUNGARIAN CAPITAL LETTER EZS +10CB1 ; mapped ; 10CF1 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENT-SHAPED SIGN +10CB2 ; mapped ; 10CF2 # 8.0 OLD HUNGARIAN CAPITAL LETTER US +10CB3..10CBF ; disallowed # NA .. +10CC0..10CF2 ; valid # 8.0 OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US +10CF3..10CF9 ; disallowed # NA .. +10CFA..10CFF ; valid ; ; NV8 # 8.0 OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND +10D00..10E5F ; disallowed # NA .. +10E60..10E7E ; valid ; ; NV8 # 5.2 RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS +10E7F..10FFF ; disallowed # NA .. +11000..11046 ; valid # 6.0 BRAHMI SIGN CANDRABINDU..BRAHMI VIRAMA +11047..1104D ; valid ; ; NV8 # 6.0 BRAHMI DANDA..BRAHMI PUNCTUATION LOTUS +1104E..11051 ; disallowed # NA .. +11052..11065 ; valid ; ; NV8 # 6.0 BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND +11066..1106F ; valid # 6.0 BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE +11070..1107E ; disallowed # NA .. +1107F ; valid # 7.0 BRAHMI NUMBER JOINER +11080..110BA ; valid # 5.2 KAITHI SIGN CANDRABINDU..KAITHI SIGN NUKTA +110BB..110BC ; valid ; ; NV8 # 5.2 KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN +110BD ; disallowed # 5.2 KAITHI NUMBER SIGN +110BE..110C1 ; valid ; ; NV8 # 5.2 KAITHI SECTION MARK..KAITHI DOUBLE DANDA +110C2..110CF ; disallowed # NA .. +110D0..110E8 ; valid # 6.1 SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE +110E9..110EF ; disallowed # NA .. +110F0..110F9 ; valid # 6.1 SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE +110FA..110FF ; disallowed # NA .. +11100..11134 ; valid # 6.1 CHAKMA SIGN CANDRABINDU..CHAKMA MAAYYAA +11135 ; disallowed # NA +11136..1113F ; valid # 6.1 CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE +11140..11143 ; valid ; ; NV8 # 6.1 CHAKMA SECTION MARK..CHAKMA QUESTION MARK +11144..1114F ; disallowed # NA .. +11150..11173 ; valid # 7.0 MAHAJANI LETTER A..MAHAJANI SIGN NUKTA +11174..11175 ; valid ; ; NV8 # 7.0 MAHAJANI ABBREVIATION SIGN..MAHAJANI SECTION MARK +11176 ; valid # 7.0 MAHAJANI LIGATURE SHRI +11177..1117F ; disallowed # NA .. +11180..111C4 ; valid # 6.1 SHARADA SIGN CANDRABINDU..SHARADA OM +111C5..111C8 ; valid ; ; NV8 # 6.1 SHARADA DANDA..SHARADA SEPARATOR +111C9 ; valid ; ; NV8 # 8.0 SHARADA SANDHI MARK +111CA..111CC ; valid # 8.0 SHARADA SIGN NUKTA..SHARADA EXTRA SHORT VOWEL MARK +111CD ; valid ; ; NV8 # 7.0 SHARADA SUTRA MARK +111CE..111CF ; disallowed # NA .. 
+111D0..111D9 ; valid # 6.1 SHARADA DIGIT ZERO..SHARADA DIGIT NINE +111DA ; valid # 7.0 SHARADA EKAM +111DB ; valid ; ; NV8 # 8.0 SHARADA SIGN SIDDHAM +111DC ; valid # 8.0 SHARADA HEADSTROKE +111DD..111DF ; valid ; ; NV8 # 8.0 SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2 +111E0 ; disallowed # NA +111E1..111F4 ; valid ; ; NV8 # 7.0 SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND +111F5..111FF ; disallowed # NA .. +11200..11211 ; valid # 7.0 KHOJKI LETTER A..KHOJKI LETTER JJA +11212 ; disallowed # NA +11213..11237 ; valid # 7.0 KHOJKI LETTER NYA..KHOJKI SIGN SHADDA +11238..1123D ; valid ; ; NV8 # 7.0 KHOJKI DANDA..KHOJKI ABBREVIATION SIGN +1123E ; valid # 9.0 KHOJKI SIGN SUKUN +1123F..1127F ; disallowed # NA .. +11280..11286 ; valid # 8.0 MULTANI LETTER A..MULTANI LETTER GA +11287 ; disallowed # NA +11288 ; valid # 8.0 MULTANI LETTER GHA +11289 ; disallowed # NA +1128A..1128D ; valid # 8.0 MULTANI LETTER CA..MULTANI LETTER JJA +1128E ; disallowed # NA +1128F..1129D ; valid # 8.0 MULTANI LETTER NYA..MULTANI LETTER BA +1129E ; disallowed # NA +1129F..112A8 ; valid # 8.0 MULTANI LETTER BHA..MULTANI LETTER RHA +112A9 ; valid ; ; NV8 # 8.0 MULTANI SECTION MARK +112AA..112AF ; disallowed # NA .. +112B0..112EA ; valid # 7.0 KHUDAWADI LETTER A..KHUDAWADI SIGN VIRAMA +112EB..112EF ; disallowed # NA .. +112F0..112F9 ; valid # 7.0 KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE +112FA..112FF ; disallowed # NA .. +11300 ; valid # 8.0 GRANTHA SIGN COMBINING ANUSVARA ABOVE +11301..11303 ; valid # 7.0 GRANTHA SIGN CANDRABINDU..GRANTHA SIGN VISARGA +11304 ; disallowed # NA +11305..1130C ; valid # 7.0 GRANTHA LETTER A..GRANTHA LETTER VOCALIC L +1130D..1130E ; disallowed # NA .. +1130F..11310 ; valid # 7.0 GRANTHA LETTER EE..GRANTHA LETTER AI +11311..11312 ; disallowed # NA .. +11313..11328 ; valid # 7.0 GRANTHA LETTER OO..GRANTHA LETTER NA +11329 ; disallowed # NA +1132A..11330 ; valid # 7.0 GRANTHA LETTER PA..GRANTHA LETTER RA +11331 ; disallowed # NA +11332..11333 ; valid # 7.0 GRANTHA LETTER LA..GRANTHA LETTER LLA +11334 ; disallowed # NA +11335..11339 ; valid # 7.0 GRANTHA LETTER VA..GRANTHA LETTER HA +1133A..1133B ; disallowed # NA .. +1133C..11344 ; valid # 7.0 GRANTHA SIGN NUKTA..GRANTHA VOWEL SIGN VOCALIC RR +11345..11346 ; disallowed # NA .. +11347..11348 ; valid # 7.0 GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI +11349..1134A ; disallowed # NA .. +1134B..1134D ; valid # 7.0 GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA +1134E..1134F ; disallowed # NA .. +11350 ; valid # 8.0 GRANTHA OM +11351..11356 ; disallowed # NA .. +11357 ; valid # 7.0 GRANTHA AU LENGTH MARK +11358..1135C ; disallowed # NA .. +1135D..11363 ; valid # 7.0 GRANTHA SIGN PLUTA..GRANTHA VOWEL SIGN VOCALIC LL +11364..11365 ; disallowed # NA .. +11366..1136C ; valid # 7.0 COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX +1136D..1136F ; disallowed # NA .. +11370..11374 ; valid # 7.0 COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA +11375..113FF ; disallowed # NA .. +11400..1144A ; valid # 9.0 NEWA LETTER A..NEWA SIDDHI +1144B..1144F ; valid ; ; NV8 # 9.0 NEWA DANDA..NEWA ABBREVIATION SIGN +11450..11459 ; valid # 9.0 NEWA DIGIT ZERO..NEWA DIGIT NINE +1145A ; disallowed # NA +1145B ; valid ; ; NV8 # 9.0 NEWA PLACEHOLDER MARK +1145C ; disallowed # NA +1145D ; valid ; ; NV8 # 9.0 NEWA INSERTION SIGN +1145E..1147F ; disallowed # NA .. +11480..114C5 ; valid # 7.0 TIRHUTA ANJI..TIRHUTA GVANG +114C6 ; valid ; ; NV8 # 7.0 TIRHUTA ABBREVIATION SIGN +114C7 ; valid # 7.0 TIRHUTA OM +114C8..114CF ; disallowed # NA .. 
+114D0..114D9 ; valid # 7.0 TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE +114DA..1157F ; disallowed # NA .. +11580..115B5 ; valid # 7.0 SIDDHAM LETTER A..SIDDHAM VOWEL SIGN VOCALIC RR +115B6..115B7 ; disallowed # NA .. +115B8..115C0 ; valid # 7.0 SIDDHAM VOWEL SIGN E..SIDDHAM SIGN NUKTA +115C1..115C9 ; valid ; ; NV8 # 7.0 SIDDHAM SIGN SIDDHAM..SIDDHAM END OF TEXT MARK +115CA..115D7 ; valid ; ; NV8 # 8.0 SIDDHAM SECTION MARK WITH TRIDENT AND U-SHAPED ORNAMENTS..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES +115D8..115DD ; valid # 8.0 SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM VOWEL SIGN ALTERNATE UU +115DE..115FF ; disallowed # NA .. +11600..11640 ; valid # 7.0 MODI LETTER A..MODI SIGN ARDHACANDRA +11641..11643 ; valid ; ; NV8 # 7.0 MODI DANDA..MODI ABBREVIATION SIGN +11644 ; valid # 7.0 MODI SIGN HUVA +11645..1164F ; disallowed # NA .. +11650..11659 ; valid # 7.0 MODI DIGIT ZERO..MODI DIGIT NINE +1165A..1165F ; disallowed # NA .. +11660..1166C ; valid ; ; NV8 # 9.0 MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT +1166D..1167F ; disallowed # NA .. +11680..116B7 ; valid # 6.1 TAKRI LETTER A..TAKRI SIGN NUKTA +116B8..116BF ; disallowed # NA .. +116C0..116C9 ; valid # 6.1 TAKRI DIGIT ZERO..TAKRI DIGIT NINE +116CA..116FF ; disallowed # NA .. +11700..11719 ; valid # 8.0 AHOM LETTER KA..AHOM LETTER JHA +1171A..1171C ; disallowed # NA .. +1171D..1172B ; valid # 8.0 AHOM CONSONANT SIGN MEDIAL LA..AHOM SIGN KILLER +1172C..1172F ; disallowed # NA .. +11730..11739 ; valid # 8.0 AHOM DIGIT ZERO..AHOM DIGIT NINE +1173A..1173F ; valid ; ; NV8 # 8.0 AHOM NUMBER TEN..AHOM SYMBOL VI +11740..1189F ; disallowed # NA .. +118A0 ; mapped ; 118C0 # 7.0 WARANG CITI CAPITAL LETTER NGAA +118A1 ; mapped ; 118C1 # 7.0 WARANG CITI CAPITAL LETTER A +118A2 ; mapped ; 118C2 # 7.0 WARANG CITI CAPITAL LETTER WI +118A3 ; mapped ; 118C3 # 7.0 WARANG CITI CAPITAL LETTER YU +118A4 ; mapped ; 118C4 # 7.0 WARANG CITI CAPITAL LETTER YA +118A5 ; mapped ; 118C5 # 7.0 WARANG CITI CAPITAL LETTER YO +118A6 ; mapped ; 118C6 # 7.0 WARANG CITI CAPITAL LETTER II +118A7 ; mapped ; 118C7 # 7.0 WARANG CITI CAPITAL LETTER UU +118A8 ; mapped ; 118C8 # 7.0 WARANG CITI CAPITAL LETTER E +118A9 ; mapped ; 118C9 # 7.0 WARANG CITI CAPITAL LETTER O +118AA ; mapped ; 118CA # 7.0 WARANG CITI CAPITAL LETTER ANG +118AB ; mapped ; 118CB # 7.0 WARANG CITI CAPITAL LETTER GA +118AC ; mapped ; 118CC # 7.0 WARANG CITI CAPITAL LETTER KO +118AD ; mapped ; 118CD # 7.0 WARANG CITI CAPITAL LETTER ENY +118AE ; mapped ; 118CE # 7.0 WARANG CITI CAPITAL LETTER YUJ +118AF ; mapped ; 118CF # 7.0 WARANG CITI CAPITAL LETTER UC +118B0 ; mapped ; 118D0 # 7.0 WARANG CITI CAPITAL LETTER ENN +118B1 ; mapped ; 118D1 # 7.0 WARANG CITI CAPITAL LETTER ODD +118B2 ; mapped ; 118D2 # 7.0 WARANG CITI CAPITAL LETTER TTE +118B3 ; mapped ; 118D3 # 7.0 WARANG CITI CAPITAL LETTER NUNG +118B4 ; mapped ; 118D4 # 7.0 WARANG CITI CAPITAL LETTER DA +118B5 ; mapped ; 118D5 # 7.0 WARANG CITI CAPITAL LETTER AT +118B6 ; mapped ; 118D6 # 7.0 WARANG CITI CAPITAL LETTER AM +118B7 ; mapped ; 118D7 # 7.0 WARANG CITI CAPITAL LETTER BU +118B8 ; mapped ; 118D8 # 7.0 WARANG CITI CAPITAL LETTER PU +118B9 ; mapped ; 118D9 # 7.0 WARANG CITI CAPITAL LETTER HIYO +118BA ; mapped ; 118DA # 7.0 WARANG CITI CAPITAL LETTER HOLO +118BB ; mapped ; 118DB # 7.0 WARANG CITI CAPITAL LETTER HORR +118BC ; mapped ; 118DC # 7.0 WARANG CITI CAPITAL LETTER HAR +118BD ; mapped ; 118DD # 7.0 WARANG CITI CAPITAL LETTER SSUU +118BE ; mapped ; 118DE # 7.0 WARANG CITI CAPITAL LETTER SII +118BF ; 
mapped ; 118DF # 7.0 WARANG CITI CAPITAL LETTER VIYO +118C0..118E9 ; valid # 7.0 WARANG CITI SMALL LETTER NGAA..WARANG CITI DIGIT NINE +118EA..118F2 ; valid ; ; NV8 # 7.0 WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY +118F3..118FE ; disallowed # NA .. +118FF ; valid # 7.0 WARANG CITI OM +11900..11ABF ; disallowed # NA .. +11AC0..11AF8 ; valid # 7.0 PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL +11AF9..11BFF ; disallowed # NA .. +11C00..11C08 ; valid # 9.0 BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L +11C09 ; disallowed # NA +11C0A..11C36 ; valid # 9.0 BHAIKSUKI LETTER E..BHAIKSUKI VOWEL SIGN VOCALIC L +11C37 ; disallowed # NA +11C38..11C40 ; valid # 9.0 BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN AVAGRAHA +11C41..11C45 ; valid ; ; NV8 # 9.0 BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2 +11C46..11C4F ; disallowed # NA .. +11C50..11C59 ; valid # 9.0 BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE +11C5A..11C6C ; valid ; ; NV8 # 9.0 BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK +11C6D..11C6F ; disallowed # NA .. +11C70..11C71 ; valid ; ; NV8 # 9.0 MARCHEN HEAD MARK..MARCHEN MARK SHAD +11C72..11C8F ; valid # 9.0 MARCHEN LETTER KA..MARCHEN LETTER A +11C90..11C91 ; disallowed # NA .. +11C92..11CA7 ; valid # 9.0 MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA +11CA8 ; disallowed # NA +11CA9..11CB6 ; valid # 9.0 MARCHEN SUBJOINED LETTER YA..MARCHEN SIGN CANDRABINDU +11CB7..11FFF ; disallowed # NA .. +12000..1236E ; valid # 5.0 CUNEIFORM SIGN A..CUNEIFORM SIGN ZUM +1236F..12398 ; valid # 7.0 CUNEIFORM SIGN KAP ELAMITE..CUNEIFORM SIGN UM TIMES ME +12399 ; valid # 8.0 CUNEIFORM SIGN U U +1239A..123FF ; disallowed # NA .. +12400..12462 ; valid ; ; NV8 # 5.0 CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN OLD ASSYRIAN ONE QUARTER +12463..1246E ; valid ; ; NV8 # 7.0 CUNEIFORM NUMERIC SIGN ONE QUARTER GUR..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM +1246F ; disallowed # NA +12470..12473 ; valid ; ; NV8 # 5.0 CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL TRICOLON +12474 ; valid ; ; NV8 # 7.0 CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON +12475..1247F ; disallowed # NA .. +12480..12543 ; valid # 8.0 CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU +12544..12FFF ; disallowed # NA .. +13000..1342E ; valid # 5.2 EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032 +1342F..143FF ; disallowed # NA .. +14400..14646 ; valid # 8.0 ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530 +14647..167FF ; disallowed # NA .. +16800..16A38 ; valid # 6.0 BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ +16A39..16A3F ; disallowed # NA .. +16A40..16A5E ; valid # 7.0 MRO LETTER TA..MRO LETTER TEK +16A5F ; disallowed # NA +16A60..16A69 ; valid # 7.0 MRO DIGIT ZERO..MRO DIGIT NINE +16A6A..16A6D ; disallowed # NA .. +16A6E..16A6F ; valid ; ; NV8 # 7.0 MRO DANDA..MRO DOUBLE DANDA +16A70..16ACF ; disallowed # NA .. +16AD0..16AED ; valid # 7.0 BASSA VAH LETTER ENNI..BASSA VAH LETTER I +16AEE..16AEF ; disallowed # NA .. +16AF0..16AF4 ; valid # 7.0 BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE +16AF5 ; valid ; ; NV8 # 7.0 BASSA VAH FULL STOP +16AF6..16AFF ; disallowed # NA .. 
+16B00..16B36 ; valid # 7.0 PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG MARK CIM TAUM +16B37..16B3F ; valid ; ; NV8 # 7.0 PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN XYEEM FAIB +16B40..16B43 ; valid # 7.0 PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM +16B44..16B45 ; valid ; ; NV8 # 7.0 PAHAWH HMONG SIGN XAUS..PAHAWH HMONG SIGN CIM TSOV ROG +16B46..16B4F ; disallowed # NA .. +16B50..16B59 ; valid # 7.0 PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE +16B5A ; disallowed # NA +16B5B..16B61 ; valid ; ; NV8 # 7.0 PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS +16B62 ; disallowed # NA +16B63..16B77 ; valid # 7.0 PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS +16B78..16B7C ; disallowed # NA .. +16B7D..16B8F ; valid # 7.0 PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ +16B90..16EFF ; disallowed # NA .. +16F00..16F44 ; valid # 6.1 MIAO LETTER PA..MIAO LETTER HHA +16F45..16F4F ; disallowed # NA .. +16F50..16F7E ; valid # 6.1 MIAO LETTER NASALIZATION..MIAO VOWEL SIGN NG +16F7F..16F8E ; disallowed # NA .. +16F8F..16F9F ; valid # 6.1 MIAO TONE RIGHT..MIAO LETTER REFORMED TONE-8 +16FA0..16FDF ; disallowed # NA .. +16FE0 ; valid # 9.0 TANGUT ITERATION MARK +16FE1..16FFF ; disallowed # NA .. +17000..187EC ; valid # 9.0 TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187EC +187ED..187FF ; disallowed # NA .. +18800..18AF2 ; valid # 9.0 TANGUT COMPONENT-001..TANGUT COMPONENT-755 +18AF3..1AFFF ; disallowed # NA .. +1B000..1B001 ; valid # 6.0 KATAKANA LETTER ARCHAIC E..HIRAGANA LETTER ARCHAIC YE +1B002..1BBFF ; disallowed # NA .. +1BC00..1BC6A ; valid # 7.0 DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M +1BC6B..1BC6F ; disallowed # NA .. +1BC70..1BC7C ; valid # 7.0 DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK +1BC7D..1BC7F ; disallowed # NA .. +1BC80..1BC88 ; valid # 7.0 DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL +1BC89..1BC8F ; disallowed # NA .. +1BC90..1BC99 ; valid # 7.0 DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW +1BC9A..1BC9B ; disallowed # NA .. +1BC9C ; valid ; ; NV8 # 7.0 DUPLOYAN SIGN O WITH CROSS +1BC9D..1BC9E ; valid # 7.0 DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK +1BC9F ; valid ; ; NV8 # 7.0 DUPLOYAN PUNCTUATION CHINOOK FULL STOP +1BCA0..1BCA3 ; ignored # 7.0 SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP +1BCA4..1CFFF ; disallowed # NA .. +1D000..1D0F5 ; valid ; ; NV8 # 3.1 BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO +1D0F6..1D0FF ; disallowed # NA .. +1D100..1D126 ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2 +1D127..1D128 ; disallowed # NA .. 
+1D129 ; valid ; ; NV8 # 5.1 MUSICAL SYMBOL MULTIPLE MEASURE REST +1D12A..1D15D ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL DOUBLE SHARP..MUSICAL SYMBOL WHOLE NOTE +1D15E ; mapped ; 1D157 1D165 # 3.1 MUSICAL SYMBOL HALF NOTE +1D15F ; mapped ; 1D158 1D165 # 3.1 MUSICAL SYMBOL QUARTER NOTE +1D160 ; mapped ; 1D158 1D165 1D16E #3.1 MUSICAL SYMBOL EIGHTH NOTE +1D161 ; mapped ; 1D158 1D165 1D16F #3.1 MUSICAL SYMBOL SIXTEENTH NOTE +1D162 ; mapped ; 1D158 1D165 1D170 #3.1 MUSICAL SYMBOL THIRTY-SECOND NOTE +1D163 ; mapped ; 1D158 1D165 1D171 #3.1 MUSICAL SYMBOL SIXTY-FOURTH NOTE +1D164 ; mapped ; 1D158 1D165 1D172 #3.1 MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE +1D165..1D172 ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING FLAG-5 +1D173..1D17A ; disallowed # 3.1 MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE +1D17B..1D1BA ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL SEMIBREVIS BLACK +1D1BB ; mapped ; 1D1B9 1D165 # 3.1 MUSICAL SYMBOL MINIMA +1D1BC ; mapped ; 1D1BA 1D165 # 3.1 MUSICAL SYMBOL MINIMA BLACK +1D1BD ; mapped ; 1D1B9 1D165 1D16E #3.1 MUSICAL SYMBOL SEMIMINIMA WHITE +1D1BE ; mapped ; 1D1BA 1D165 1D16E #3.1 MUSICAL SYMBOL SEMIMINIMA BLACK +1D1BF ; mapped ; 1D1B9 1D165 1D16F #3.1 MUSICAL SYMBOL FUSA WHITE +1D1C0 ; mapped ; 1D1BA 1D165 1D16F #3.1 MUSICAL SYMBOL FUSA BLACK +1D1C1..1D1DD ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL LONGA PERFECTA REST..MUSICAL SYMBOL PES SUBPUNCTIS +1D1DE..1D1E8 ; valid ; ; NV8 # 8.0 MUSICAL SYMBOL KIEVAN C CLEF..MUSICAL SYMBOL KIEVAN FLAT SIGN +1D1E9..1D1FF ; disallowed # NA .. +1D200..1D245 ; valid ; ; NV8 # 4.1 GREEK VOCAL NOTATION SYMBOL-1..GREEK MUSICAL LEIMMA +1D246..1D2FF ; disallowed # NA .. +1D300..1D356 ; valid ; ; NV8 # 4.0 MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING +1D357..1D35F ; disallowed # NA .. +1D360..1D371 ; valid ; ; NV8 # 5.0 COUNTING ROD UNIT DIGIT ONE..COUNTING ROD TENS DIGIT NINE +1D372..1D3FF ; disallowed # NA .. 
+1D400 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD CAPITAL A +1D401 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD CAPITAL B +1D402 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD CAPITAL C +1D403 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD CAPITAL D +1D404 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD CAPITAL E +1D405 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD CAPITAL F +1D406 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD CAPITAL G +1D407 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD CAPITAL H +1D408 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD CAPITAL I +1D409 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD CAPITAL J +1D40A ; mapped ; 006B # 3.1 MATHEMATICAL BOLD CAPITAL K +1D40B ; mapped ; 006C # 3.1 MATHEMATICAL BOLD CAPITAL L +1D40C ; mapped ; 006D # 3.1 MATHEMATICAL BOLD CAPITAL M +1D40D ; mapped ; 006E # 3.1 MATHEMATICAL BOLD CAPITAL N +1D40E ; mapped ; 006F # 3.1 MATHEMATICAL BOLD CAPITAL O +1D40F ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD CAPITAL P +1D410 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD CAPITAL Q +1D411 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD CAPITAL R +1D412 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD CAPITAL S +1D413 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD CAPITAL T +1D414 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD CAPITAL U +1D415 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD CAPITAL V +1D416 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD CAPITAL W +1D417 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD CAPITAL X +1D418 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD CAPITAL Y +1D419 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD CAPITAL Z +1D41A ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SMALL A +1D41B ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SMALL B +1D41C ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD SMALL C +1D41D ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SMALL D +1D41E ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SMALL E +1D41F ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SMALL F +1D420 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SMALL G +1D421 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SMALL H +1D422 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD SMALL I +1D423 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SMALL J +1D424 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SMALL K +1D425 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SMALL L +1D426 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SMALL M +1D427 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SMALL N +1D428 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SMALL O +1D429 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SMALL P +1D42A ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SMALL Q +1D42B ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SMALL R +1D42C ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SMALL S +1D42D ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SMALL T +1D42E ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SMALL U +1D42F ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SMALL V +1D430 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD SMALL W +1D431 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SMALL X +1D432 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SMALL Y +1D433 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SMALL Z +1D434 ; mapped ; 0061 # 3.1 MATHEMATICAL ITALIC CAPITAL A +1D435 ; mapped ; 0062 # 3.1 MATHEMATICAL ITALIC CAPITAL B +1D436 ; mapped ; 0063 # 3.1 MATHEMATICAL ITALIC CAPITAL C +1D437 ; mapped ; 0064 # 3.1 MATHEMATICAL ITALIC CAPITAL D +1D438 ; mapped ; 0065 # 3.1 MATHEMATICAL ITALIC CAPITAL E +1D439 ; mapped ; 0066 # 3.1 MATHEMATICAL ITALIC CAPITAL F +1D43A ; mapped ; 0067 # 3.1 MATHEMATICAL ITALIC CAPITAL G +1D43B ; mapped ; 0068 # 3.1 MATHEMATICAL ITALIC CAPITAL H +1D43C ; mapped ; 0069 # 3.1 MATHEMATICAL ITALIC CAPITAL I +1D43D ; mapped ; 006A # 3.1 MATHEMATICAL ITALIC CAPITAL J +1D43E ; mapped ; 006B # 3.1 MATHEMATICAL ITALIC 
CAPITAL K +1D43F ; mapped ; 006C # 3.1 MATHEMATICAL ITALIC CAPITAL L +1D440 ; mapped ; 006D # 3.1 MATHEMATICAL ITALIC CAPITAL M +1D441 ; mapped ; 006E # 3.1 MATHEMATICAL ITALIC CAPITAL N +1D442 ; mapped ; 006F # 3.1 MATHEMATICAL ITALIC CAPITAL O +1D443 ; mapped ; 0070 # 3.1 MATHEMATICAL ITALIC CAPITAL P +1D444 ; mapped ; 0071 # 3.1 MATHEMATICAL ITALIC CAPITAL Q +1D445 ; mapped ; 0072 # 3.1 MATHEMATICAL ITALIC CAPITAL R +1D446 ; mapped ; 0073 # 3.1 MATHEMATICAL ITALIC CAPITAL S +1D447 ; mapped ; 0074 # 3.1 MATHEMATICAL ITALIC CAPITAL T +1D448 ; mapped ; 0075 # 3.1 MATHEMATICAL ITALIC CAPITAL U +1D449 ; mapped ; 0076 # 3.1 MATHEMATICAL ITALIC CAPITAL V +1D44A ; mapped ; 0077 # 3.1 MATHEMATICAL ITALIC CAPITAL W +1D44B ; mapped ; 0078 # 3.1 MATHEMATICAL ITALIC CAPITAL X +1D44C ; mapped ; 0079 # 3.1 MATHEMATICAL ITALIC CAPITAL Y +1D44D ; mapped ; 007A # 3.1 MATHEMATICAL ITALIC CAPITAL Z +1D44E ; mapped ; 0061 # 3.1 MATHEMATICAL ITALIC SMALL A +1D44F ; mapped ; 0062 # 3.1 MATHEMATICAL ITALIC SMALL B +1D450 ; mapped ; 0063 # 3.1 MATHEMATICAL ITALIC SMALL C +1D451 ; mapped ; 0064 # 3.1 MATHEMATICAL ITALIC SMALL D +1D452 ; mapped ; 0065 # 3.1 MATHEMATICAL ITALIC SMALL E +1D453 ; mapped ; 0066 # 3.1 MATHEMATICAL ITALIC SMALL F +1D454 ; mapped ; 0067 # 3.1 MATHEMATICAL ITALIC SMALL G +1D455 ; disallowed # NA +1D456 ; mapped ; 0069 # 3.1 MATHEMATICAL ITALIC SMALL I +1D457 ; mapped ; 006A # 3.1 MATHEMATICAL ITALIC SMALL J +1D458 ; mapped ; 006B # 3.1 MATHEMATICAL ITALIC SMALL K +1D459 ; mapped ; 006C # 3.1 MATHEMATICAL ITALIC SMALL L +1D45A ; mapped ; 006D # 3.1 MATHEMATICAL ITALIC SMALL M +1D45B ; mapped ; 006E # 3.1 MATHEMATICAL ITALIC SMALL N +1D45C ; mapped ; 006F # 3.1 MATHEMATICAL ITALIC SMALL O +1D45D ; mapped ; 0070 # 3.1 MATHEMATICAL ITALIC SMALL P +1D45E ; mapped ; 0071 # 3.1 MATHEMATICAL ITALIC SMALL Q +1D45F ; mapped ; 0072 # 3.1 MATHEMATICAL ITALIC SMALL R +1D460 ; mapped ; 0073 # 3.1 MATHEMATICAL ITALIC SMALL S +1D461 ; mapped ; 0074 # 3.1 MATHEMATICAL ITALIC SMALL T +1D462 ; mapped ; 0075 # 3.1 MATHEMATICAL ITALIC SMALL U +1D463 ; mapped ; 0076 # 3.1 MATHEMATICAL ITALIC SMALL V +1D464 ; mapped ; 0077 # 3.1 MATHEMATICAL ITALIC SMALL W +1D465 ; mapped ; 0078 # 3.1 MATHEMATICAL ITALIC SMALL X +1D466 ; mapped ; 0079 # 3.1 MATHEMATICAL ITALIC SMALL Y +1D467 ; mapped ; 007A # 3.1 MATHEMATICAL ITALIC SMALL Z +1D468 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL A +1D469 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL B +1D46A ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL C +1D46B ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL D +1D46C ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL E +1D46D ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL F +1D46E ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL G +1D46F ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL H +1D470 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL I +1D471 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL J +1D472 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL K +1D473 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL L +1D474 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL M +1D475 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL N +1D476 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL O +1D477 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL P +1D478 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Q +1D479 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL R +1D47A ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD ITALIC 
CAPITAL S +1D47B ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL T +1D47C ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL U +1D47D ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL V +1D47E ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL W +1D47F ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL X +1D480 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Y +1D481 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Z +1D482 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD ITALIC SMALL A +1D483 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD ITALIC SMALL B +1D484 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD ITALIC SMALL C +1D485 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD ITALIC SMALL D +1D486 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD ITALIC SMALL E +1D487 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD ITALIC SMALL F +1D488 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD ITALIC SMALL G +1D489 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD ITALIC SMALL H +1D48A ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD ITALIC SMALL I +1D48B ; mapped ; 006A # 3.1 MATHEMATICAL BOLD ITALIC SMALL J +1D48C ; mapped ; 006B # 3.1 MATHEMATICAL BOLD ITALIC SMALL K +1D48D ; mapped ; 006C # 3.1 MATHEMATICAL BOLD ITALIC SMALL L +1D48E ; mapped ; 006D # 3.1 MATHEMATICAL BOLD ITALIC SMALL M +1D48F ; mapped ; 006E # 3.1 MATHEMATICAL BOLD ITALIC SMALL N +1D490 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD ITALIC SMALL O +1D491 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD ITALIC SMALL P +1D492 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD ITALIC SMALL Q +1D493 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD ITALIC SMALL R +1D494 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD ITALIC SMALL S +1D495 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD ITALIC SMALL T +1D496 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD ITALIC SMALL U +1D497 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD ITALIC SMALL V +1D498 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD ITALIC SMALL W +1D499 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD ITALIC SMALL X +1D49A ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD ITALIC SMALL Y +1D49B ; mapped ; 007A # 3.1 MATHEMATICAL BOLD ITALIC SMALL Z +1D49C ; mapped ; 0061 # 3.1 MATHEMATICAL SCRIPT CAPITAL A +1D49D ; disallowed # NA +1D49E ; mapped ; 0063 # 3.1 MATHEMATICAL SCRIPT CAPITAL C +1D49F ; mapped ; 0064 # 3.1 MATHEMATICAL SCRIPT CAPITAL D +1D4A0..1D4A1 ; disallowed # NA .. +1D4A2 ; mapped ; 0067 # 3.1 MATHEMATICAL SCRIPT CAPITAL G +1D4A3..1D4A4 ; disallowed # NA .. +1D4A5 ; mapped ; 006A # 3.1 MATHEMATICAL SCRIPT CAPITAL J +1D4A6 ; mapped ; 006B # 3.1 MATHEMATICAL SCRIPT CAPITAL K +1D4A7..1D4A8 ; disallowed # NA .. 
+1D4A9 ; mapped ; 006E # 3.1 MATHEMATICAL SCRIPT CAPITAL N +1D4AA ; mapped ; 006F # 3.1 MATHEMATICAL SCRIPT CAPITAL O +1D4AB ; mapped ; 0070 # 3.1 MATHEMATICAL SCRIPT CAPITAL P +1D4AC ; mapped ; 0071 # 3.1 MATHEMATICAL SCRIPT CAPITAL Q +1D4AD ; disallowed # NA +1D4AE ; mapped ; 0073 # 3.1 MATHEMATICAL SCRIPT CAPITAL S +1D4AF ; mapped ; 0074 # 3.1 MATHEMATICAL SCRIPT CAPITAL T +1D4B0 ; mapped ; 0075 # 3.1 MATHEMATICAL SCRIPT CAPITAL U +1D4B1 ; mapped ; 0076 # 3.1 MATHEMATICAL SCRIPT CAPITAL V +1D4B2 ; mapped ; 0077 # 3.1 MATHEMATICAL SCRIPT CAPITAL W +1D4B3 ; mapped ; 0078 # 3.1 MATHEMATICAL SCRIPT CAPITAL X +1D4B4 ; mapped ; 0079 # 3.1 MATHEMATICAL SCRIPT CAPITAL Y +1D4B5 ; mapped ; 007A # 3.1 MATHEMATICAL SCRIPT CAPITAL Z +1D4B6 ; mapped ; 0061 # 3.1 MATHEMATICAL SCRIPT SMALL A +1D4B7 ; mapped ; 0062 # 3.1 MATHEMATICAL SCRIPT SMALL B +1D4B8 ; mapped ; 0063 # 3.1 MATHEMATICAL SCRIPT SMALL C +1D4B9 ; mapped ; 0064 # 3.1 MATHEMATICAL SCRIPT SMALL D +1D4BA ; disallowed # NA +1D4BB ; mapped ; 0066 # 3.1 MATHEMATICAL SCRIPT SMALL F +1D4BC ; disallowed # NA +1D4BD ; mapped ; 0068 # 3.1 MATHEMATICAL SCRIPT SMALL H +1D4BE ; mapped ; 0069 # 3.1 MATHEMATICAL SCRIPT SMALL I +1D4BF ; mapped ; 006A # 3.1 MATHEMATICAL SCRIPT SMALL J +1D4C0 ; mapped ; 006B # 3.1 MATHEMATICAL SCRIPT SMALL K +1D4C1 ; mapped ; 006C # 4.0 MATHEMATICAL SCRIPT SMALL L +1D4C2 ; mapped ; 006D # 3.1 MATHEMATICAL SCRIPT SMALL M +1D4C3 ; mapped ; 006E # 3.1 MATHEMATICAL SCRIPT SMALL N +1D4C4 ; disallowed # NA +1D4C5 ; mapped ; 0070 # 3.1 MATHEMATICAL SCRIPT SMALL P +1D4C6 ; mapped ; 0071 # 3.1 MATHEMATICAL SCRIPT SMALL Q +1D4C7 ; mapped ; 0072 # 3.1 MATHEMATICAL SCRIPT SMALL R +1D4C8 ; mapped ; 0073 # 3.1 MATHEMATICAL SCRIPT SMALL S +1D4C9 ; mapped ; 0074 # 3.1 MATHEMATICAL SCRIPT SMALL T +1D4CA ; mapped ; 0075 # 3.1 MATHEMATICAL SCRIPT SMALL U +1D4CB ; mapped ; 0076 # 3.1 MATHEMATICAL SCRIPT SMALL V +1D4CC ; mapped ; 0077 # 3.1 MATHEMATICAL SCRIPT SMALL W +1D4CD ; mapped ; 0078 # 3.1 MATHEMATICAL SCRIPT SMALL X +1D4CE ; mapped ; 0079 # 3.1 MATHEMATICAL SCRIPT SMALL Y +1D4CF ; mapped ; 007A # 3.1 MATHEMATICAL SCRIPT SMALL Z +1D4D0 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL A +1D4D1 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL B +1D4D2 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL C +1D4D3 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL D +1D4D4 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL E +1D4D5 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL F +1D4D6 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL G +1D4D7 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL H +1D4D8 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL I +1D4D9 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL J +1D4DA ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL K +1D4DB ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL L +1D4DC ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL M +1D4DD ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL N +1D4DE ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL O +1D4DF ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL P +1D4E0 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Q +1D4E1 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL R +1D4E2 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL S +1D4E3 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL T +1D4E4 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL U +1D4E5 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL V +1D4E6 ; mapped ; 0077 # 
3.1 MATHEMATICAL BOLD SCRIPT CAPITAL W +1D4E7 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL X +1D4E8 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Y +1D4E9 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Z +1D4EA ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL A +1D4EB ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL B +1D4EC ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL C +1D4ED ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL D +1D4EE ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL E +1D4EF ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL F +1D4F0 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL G +1D4F1 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL H +1D4F2 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL I +1D4F3 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SCRIPT SMALL J +1D4F4 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SCRIPT SMALL K +1D4F5 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SCRIPT SMALL L +1D4F6 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SCRIPT SMALL M +1D4F7 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SCRIPT SMALL N +1D4F8 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SCRIPT SMALL O +1D4F9 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL P +1D4FA ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Q +1D4FB ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL R +1D4FC ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL S +1D4FD ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL T +1D4FE ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL U +1D4FF ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL V +1D500 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL W +1D501 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL X +1D502 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Y +1D503 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Z +1D504 ; mapped ; 0061 # 3.1 MATHEMATICAL FRAKTUR CAPITAL A +1D505 ; mapped ; 0062 # 3.1 MATHEMATICAL FRAKTUR CAPITAL B +1D506 ; disallowed # NA +1D507 ; mapped ; 0064 # 3.1 MATHEMATICAL FRAKTUR CAPITAL D +1D508 ; mapped ; 0065 # 3.1 MATHEMATICAL FRAKTUR CAPITAL E +1D509 ; mapped ; 0066 # 3.1 MATHEMATICAL FRAKTUR CAPITAL F +1D50A ; mapped ; 0067 # 3.1 MATHEMATICAL FRAKTUR CAPITAL G +1D50B..1D50C ; disallowed # NA .. 
+1D50D ; mapped ; 006A # 3.1 MATHEMATICAL FRAKTUR CAPITAL J +1D50E ; mapped ; 006B # 3.1 MATHEMATICAL FRAKTUR CAPITAL K +1D50F ; mapped ; 006C # 3.1 MATHEMATICAL FRAKTUR CAPITAL L +1D510 ; mapped ; 006D # 3.1 MATHEMATICAL FRAKTUR CAPITAL M +1D511 ; mapped ; 006E # 3.1 MATHEMATICAL FRAKTUR CAPITAL N +1D512 ; mapped ; 006F # 3.1 MATHEMATICAL FRAKTUR CAPITAL O +1D513 ; mapped ; 0070 # 3.1 MATHEMATICAL FRAKTUR CAPITAL P +1D514 ; mapped ; 0071 # 3.1 MATHEMATICAL FRAKTUR CAPITAL Q +1D515 ; disallowed # NA +1D516 ; mapped ; 0073 # 3.1 MATHEMATICAL FRAKTUR CAPITAL S +1D517 ; mapped ; 0074 # 3.1 MATHEMATICAL FRAKTUR CAPITAL T +1D518 ; mapped ; 0075 # 3.1 MATHEMATICAL FRAKTUR CAPITAL U +1D519 ; mapped ; 0076 # 3.1 MATHEMATICAL FRAKTUR CAPITAL V +1D51A ; mapped ; 0077 # 3.1 MATHEMATICAL FRAKTUR CAPITAL W +1D51B ; mapped ; 0078 # 3.1 MATHEMATICAL FRAKTUR CAPITAL X +1D51C ; mapped ; 0079 # 3.1 MATHEMATICAL FRAKTUR CAPITAL Y +1D51D ; disallowed # NA +1D51E ; mapped ; 0061 # 3.1 MATHEMATICAL FRAKTUR SMALL A +1D51F ; mapped ; 0062 # 3.1 MATHEMATICAL FRAKTUR SMALL B +1D520 ; mapped ; 0063 # 3.1 MATHEMATICAL FRAKTUR SMALL C +1D521 ; mapped ; 0064 # 3.1 MATHEMATICAL FRAKTUR SMALL D +1D522 ; mapped ; 0065 # 3.1 MATHEMATICAL FRAKTUR SMALL E +1D523 ; mapped ; 0066 # 3.1 MATHEMATICAL FRAKTUR SMALL F +1D524 ; mapped ; 0067 # 3.1 MATHEMATICAL FRAKTUR SMALL G +1D525 ; mapped ; 0068 # 3.1 MATHEMATICAL FRAKTUR SMALL H +1D526 ; mapped ; 0069 # 3.1 MATHEMATICAL FRAKTUR SMALL I +1D527 ; mapped ; 006A # 3.1 MATHEMATICAL FRAKTUR SMALL J +1D528 ; mapped ; 006B # 3.1 MATHEMATICAL FRAKTUR SMALL K +1D529 ; mapped ; 006C # 3.1 MATHEMATICAL FRAKTUR SMALL L +1D52A ; mapped ; 006D # 3.1 MATHEMATICAL FRAKTUR SMALL M +1D52B ; mapped ; 006E # 3.1 MATHEMATICAL FRAKTUR SMALL N +1D52C ; mapped ; 006F # 3.1 MATHEMATICAL FRAKTUR SMALL O +1D52D ; mapped ; 0070 # 3.1 MATHEMATICAL FRAKTUR SMALL P +1D52E ; mapped ; 0071 # 3.1 MATHEMATICAL FRAKTUR SMALL Q +1D52F ; mapped ; 0072 # 3.1 MATHEMATICAL FRAKTUR SMALL R +1D530 ; mapped ; 0073 # 3.1 MATHEMATICAL FRAKTUR SMALL S +1D531 ; mapped ; 0074 # 3.1 MATHEMATICAL FRAKTUR SMALL T +1D532 ; mapped ; 0075 # 3.1 MATHEMATICAL FRAKTUR SMALL U +1D533 ; mapped ; 0076 # 3.1 MATHEMATICAL FRAKTUR SMALL V +1D534 ; mapped ; 0077 # 3.1 MATHEMATICAL FRAKTUR SMALL W +1D535 ; mapped ; 0078 # 3.1 MATHEMATICAL FRAKTUR SMALL X +1D536 ; mapped ; 0079 # 3.1 MATHEMATICAL FRAKTUR SMALL Y +1D537 ; mapped ; 007A # 3.1 MATHEMATICAL FRAKTUR SMALL Z +1D538 ; mapped ; 0061 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL A +1D539 ; mapped ; 0062 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL B +1D53A ; disallowed # NA +1D53B ; mapped ; 0064 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL D +1D53C ; mapped ; 0065 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL E +1D53D ; mapped ; 0066 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL F +1D53E ; mapped ; 0067 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL G +1D53F ; disallowed # NA +1D540 ; mapped ; 0069 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL I +1D541 ; mapped ; 006A # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL J +1D542 ; mapped ; 006B # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL K +1D543 ; mapped ; 006C # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL L +1D544 ; mapped ; 006D # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL M +1D545 ; disallowed # NA +1D546 ; mapped ; 006F # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL O +1D547..1D549 ; disallowed # NA .. 
+1D54A ; mapped ; 0073 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL S
+1D54B ; mapped ; 0074 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL T
+1D54C ; mapped ; 0075 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL U
+1D54D ; mapped ; 0076 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL V
+1D54E ; mapped ; 0077 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL W
+1D54F ; mapped ; 0078 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL X
+1D550 ; mapped ; 0079 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
+1D551 ; disallowed # NA
+1D552 ; mapped ; 0061 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL A
+1D553 ; mapped ; 0062 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL B
+1D554 ; mapped ; 0063 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL C
+1D555 ; mapped ; 0064 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL D
+1D556 ; mapped ; 0065 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL E
+1D557 ; mapped ; 0066 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL F
+1D558 ; mapped ; 0067 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL G
+1D559 ; mapped ; 0068 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL H
+1D55A ; mapped ; 0069 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL I
+1D55B ; mapped ; 006A # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL J
+1D55C ; mapped ; 006B # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL K
+1D55D ; mapped ; 006C # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL L
+1D55E ; mapped ; 006D # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL M
+1D55F ; mapped ; 006E # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL N
+1D560 ; mapped ; 006F # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL O
+1D561 ; mapped ; 0070 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL P
+1D562 ; mapped ; 0071 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Q
+1D563 ; mapped ; 0072 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL R
+1D564 ; mapped ; 0073 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL S
+1D565 ; mapped ; 0074 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL T
+1D566 ; mapped ; 0075 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL U
+1D567 ; mapped ; 0076 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL V
+1D568 ; mapped ; 0077 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL W
+1D569 ; mapped ; 0078 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL X
+1D56A ; mapped ; 0079 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Y
+1D56B ; mapped ; 007A # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Z
+1D56C ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL A
+1D56D ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL B
+1D56E ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL C
+1D56F ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL D
+1D570 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL E
+1D571 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL F
+1D572 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL G
+1D573 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL H
+1D574 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL I
+1D575 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL J
+1D576 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL K
+1D577 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL L
+1D578 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL M
+1D579 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL N
+1D57A ; mapped ; 006F # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL O
+1D57B ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL P
+1D57C ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Q
+1D57D ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL R
+1D57E ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL S
+1D57F ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL T
+1D580 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL U
+1D581 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL V
+1D582 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL W
+1D583 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL X
+1D584 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Y
+1D585 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Z
+1D586 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL A
+1D587 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL B
+1D588 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL C
+1D589 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL D
+1D58A ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL E
+1D58B ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL F
+1D58C ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL G
+1D58D ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL H
+1D58E ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL I
+1D58F ; mapped ; 006A # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL J
+1D590 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL K
+1D591 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL L
+1D592 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL M
+1D593 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL N
+1D594 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL O
+1D595 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL P
+1D596 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Q
+1D597 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL R
+1D598 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL S
+1D599 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL T
+1D59A ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL U
+1D59B ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL V
+1D59C ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL W
+1D59D ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL X
+1D59E ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Y
+1D59F ; mapped ; 007A # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Z
+1D5A0 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL A
+1D5A1 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL B
+1D5A2 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL C
+1D5A3 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL D
+1D5A4 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL E
+1D5A5 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL F
+1D5A6 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL G
+1D5A7 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL H
+1D5A8 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL I
+1D5A9 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF CAPITAL J
+1D5AA ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF CAPITAL K
+1D5AB ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF CAPITAL L
+1D5AC ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF CAPITAL M
+1D5AD ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF CAPITAL N
+1D5AE ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF CAPITAL O
+1D5AF ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL P
+1D5B0 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL Q
+1D5B1 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL R
+1D5B2 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL S
+1D5B3 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL T
+1D5B4 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL U
+1D5B5 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL V
+1D5B6 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL W
+1D5B7 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL X
+1D5B8 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL Y
+1D5B9 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF CAPITAL Z
+1D5BA ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF SMALL A
+1D5BB ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF SMALL B
+1D5BC ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF SMALL C
+1D5BD ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF SMALL D
+1D5BE ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF SMALL E
+1D5BF ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF SMALL F
+1D5C0 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF SMALL G
+1D5C1 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF SMALL H
+1D5C2 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF SMALL I
+1D5C3 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF SMALL J
+1D5C4 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF SMALL K
+1D5C5 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF SMALL L
+1D5C6 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF SMALL M
+1D5C7 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF SMALL N
+1D5C8 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF SMALL O
+1D5C9 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF SMALL P
+1D5CA ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF SMALL Q
+1D5CB ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF SMALL R
+1D5CC ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF SMALL S
+1D5CD ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF SMALL T
+1D5CE ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF SMALL U
+1D5CF ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF SMALL V
+1D5D0 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF SMALL W
+1D5D1 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF SMALL X
+1D5D2 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF SMALL Y
+1D5D3 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF SMALL Z
+1D5D4 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL A
+1D5D5 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL B
+1D5D6 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL C
+1D5D7 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL D
+1D5D8 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL E
+1D5D9 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL F
+1D5DA ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL G
+1D5DB ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL H
+1D5DC ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL I
+1D5DD ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL J
+1D5DE ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL K
+1D5DF ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL L
+1D5E0 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL M
+1D5E1 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL N
+1D5E2 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL O
+1D5E3 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL P
+1D5E4 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL Q
+1D5E5 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL R
+1D5E6 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL S
+1D5E7 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL T
+1D5E8 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL U
+1D5E9 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL V
+1D5EA ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL W
+1D5EB ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL X
+1D5EC ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL Y
+1D5ED ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL Z
+1D5EE ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL A
+1D5EF ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL B
+1D5F0 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL C
+1D5F1 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL D
+1D5F2 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL E
+1D5F3 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL F
+1D5F4 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL G
+1D5F5 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL H
+1D5F6 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL I
+1D5F7 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL J
+1D5F8 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL K
+1D5F9 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL L
+1D5FA ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL M
+1D5FB ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL N
+1D5FC ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL O
+1D5FD ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL P
+1D5FE ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Q
+1D5FF ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL R
+1D600 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL S
+1D601 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL T
+1D602 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL U
+1D603 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL V
+1D604 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL W
+1D605 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL X
+1D606 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Y
+1D607 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Z
+1D608 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL A
+1D609 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL B
+1D60A ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL C
+1D60B ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL D
+1D60C ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL E
+1D60D ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL F
+1D60E ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL G
+1D60F ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL H
+1D610 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL I
+1D611 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL J
+1D612 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL K
+1D613 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL L
+1D614 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL M
+1D615 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL N
+1D616 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL O
+1D617 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL P
+1D618 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Q
+1D619 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL R
+1D61A ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL S
+1D61B ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL T
+1D61C ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL U
+1D61D ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL V
+1D61E ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL W
+1D61F ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL X
+1D620 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Y
+1D621 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Z
+1D622 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL A
+1D623 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL B
+1D624 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL C
+1D625 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL D
+1D626 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL E
+1D627 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL F
+1D628 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL G
+1D629 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL H
+1D62A ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL I
+1D62B ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL J
+1D62C ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL K
+1D62D ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL L
+1D62E ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL M
+1D62F ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL N
+1D630 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL O
+1D631 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL P
+1D632 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL Q
+1D633 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL R
+1D634 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL S
+1D635 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL T
+1D636 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL U
+1D637 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL V
+1D638 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL W
+1D639 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL X
+1D63A ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL Y
+1D63B ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL Z
+1D63C ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL A
+1D63D ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL B
+1D63E ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL C
+1D63F ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL D
+1D640 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL E
+1D641 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL F
+1D642 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL G
+1D643 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL H
+1D644 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL I
+1D645 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL J
+1D646 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL K
+1D647 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL L
+1D648 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL M
+1D649 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL N
+1D64A ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL O
+1D64B ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL P
+1D64C ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Q
+1D64D ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL R
+1D64E ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL S
+1D64F ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL T
+1D650 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL U
+1D651 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL V
+1D652 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL W
+1D653 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL X
+1D654 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Y
+1D655 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Z
+1D656 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL A
+1D657 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL B
+1D658 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL C
+1D659 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL D
+1D65A ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL E
+1D65B ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL F
+1D65C ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL G
+1D65D ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL H
+1D65E ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL I
+1D65F ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL J
+1D660 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL K
+1D661 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL L
+1D662 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL M
+1D663 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL N
+1D664 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL O
+1D665 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL P
+1D666 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Q
+1D667 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL R
+1D668 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL S
+1D669 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL T
+1D66A ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL U
+1D66B ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL V
+1D66C ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL W
+1D66D ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL X
+1D66E ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Y
+1D66F ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Z
+1D670 ; mapped ; 0061 # 3.1 MATHEMATICAL MONOSPACE CAPITAL A
+1D671 ; mapped ; 0062 # 3.1 MATHEMATICAL MONOSPACE CAPITAL B
+1D672 ; mapped ; 0063 # 3.1 MATHEMATICAL MONOSPACE CAPITAL C
+1D673 ; mapped ; 0064 # 3.1 MATHEMATICAL MONOSPACE CAPITAL D
+1D674 ; mapped ; 0065 # 3.1 MATHEMATICAL MONOSPACE CAPITAL E
+1D675 ; mapped ; 0066 # 3.1 MATHEMATICAL MONOSPACE CAPITAL F
+1D676 ; mapped ; 0067 # 3.1 MATHEMATICAL MONOSPACE CAPITAL G
+1D677 ; mapped ; 0068 # 3.1 MATHEMATICAL MONOSPACE CAPITAL H
+1D678 ; mapped ; 0069 # 3.1 MATHEMATICAL MONOSPACE CAPITAL I
+1D679 ; mapped ; 006A # 3.1 MATHEMATICAL MONOSPACE CAPITAL J
+1D67A ; mapped ; 006B # 3.1 MATHEMATICAL MONOSPACE CAPITAL K
+1D67B ; mapped ; 006C # 3.1 MATHEMATICAL MONOSPACE CAPITAL L
+1D67C ; mapped ; 006D # 3.1 MATHEMATICAL MONOSPACE CAPITAL M
+1D67D ; mapped ; 006E # 3.1 MATHEMATICAL MONOSPACE CAPITAL N
+1D67E ; mapped ; 006F # 3.1 MATHEMATICAL MONOSPACE CAPITAL O
+1D67F ; mapped ; 0070 # 3.1 MATHEMATICAL MONOSPACE CAPITAL P
+1D680 ; mapped ; 0071 # 3.1 MATHEMATICAL MONOSPACE CAPITAL Q
+1D681 ; mapped ; 0072 # 3.1 MATHEMATICAL MONOSPACE CAPITAL R
+1D682 ; mapped ; 0073 # 3.1 MATHEMATICAL MONOSPACE CAPITAL S
+1D683 ; mapped ; 0074 # 3.1 MATHEMATICAL MONOSPACE CAPITAL T
+1D684 ; mapped ; 0075 # 3.1 MATHEMATICAL MONOSPACE CAPITAL U
+1D685 ; mapped ; 0076 # 3.1 MATHEMATICAL MONOSPACE CAPITAL V
+1D686 ; mapped ; 0077 # 3.1 MATHEMATICAL MONOSPACE CAPITAL W
+1D687 ; mapped ; 0078 # 3.1 MATHEMATICAL MONOSPACE CAPITAL X
+1D688 ; mapped ; 0079 # 3.1 MATHEMATICAL MONOSPACE CAPITAL Y
+1D689 ; mapped ; 007A # 3.1 MATHEMATICAL MONOSPACE CAPITAL Z
+1D68A ; mapped ; 0061 # 3.1 MATHEMATICAL MONOSPACE SMALL A
+1D68B ; mapped ; 0062 # 3.1 MATHEMATICAL MONOSPACE SMALL B
+1D68C ; mapped ; 0063 # 3.1 MATHEMATICAL MONOSPACE SMALL C
+1D68D ; mapped ; 0064 # 3.1 MATHEMATICAL MONOSPACE SMALL D
+1D68E ; mapped ; 0065 # 3.1 MATHEMATICAL MONOSPACE SMALL E
+1D68F ; mapped ; 0066 # 3.1 MATHEMATICAL MONOSPACE SMALL F
+1D690 ; mapped ; 0067 # 3.1 MATHEMATICAL MONOSPACE SMALL G
+1D691 ; mapped ; 0068 # 3.1 MATHEMATICAL MONOSPACE SMALL H
+1D692 ; mapped ; 0069 # 3.1 MATHEMATICAL MONOSPACE SMALL I
+1D693 ; mapped ; 006A # 3.1 MATHEMATICAL MONOSPACE SMALL J
+1D694 ; mapped ; 006B # 3.1 MATHEMATICAL MONOSPACE SMALL K
+1D695 ; mapped ; 006C # 3.1 MATHEMATICAL MONOSPACE SMALL L
+1D696 ; mapped ; 006D # 3.1 MATHEMATICAL MONOSPACE SMALL M
+1D697 ; mapped ; 006E # 3.1 MATHEMATICAL MONOSPACE SMALL N
+1D698 ; mapped ; 006F # 3.1 MATHEMATICAL MONOSPACE SMALL O
+1D699 ; mapped ; 0070 # 3.1 MATHEMATICAL MONOSPACE SMALL P
+1D69A ; mapped ; 0071 # 3.1 MATHEMATICAL MONOSPACE SMALL Q
+1D69B ; mapped ; 0072 # 3.1 MATHEMATICAL MONOSPACE SMALL R
+1D69C ; mapped ; 0073 # 3.1 MATHEMATICAL MONOSPACE SMALL S
+1D69D ; mapped ; 0074 # 3.1 MATHEMATICAL MONOSPACE SMALL T
+1D69E ; mapped ; 0075 # 3.1 MATHEMATICAL MONOSPACE SMALL U
+1D69F ; mapped ; 0076 # 3.1 MATHEMATICAL MONOSPACE SMALL V
+1D6A0 ; mapped ; 0077 # 3.1 MATHEMATICAL MONOSPACE SMALL W
+1D6A1 ; mapped ; 0078 # 3.1 MATHEMATICAL MONOSPACE SMALL X
+1D6A2 ; mapped ; 0079 # 3.1 MATHEMATICAL MONOSPACE SMALL Y
+1D6A3 ; mapped ; 007A # 3.1 MATHEMATICAL MONOSPACE SMALL Z
+1D6A4 ; mapped ; 0131 # 4.1 MATHEMATICAL ITALIC SMALL DOTLESS I
+1D6A5 ; mapped ; 0237 # 4.1 MATHEMATICAL ITALIC SMALL DOTLESS J
+1D6A6..1D6A7 ; disallowed # NA ..
+1D6A8 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD CAPITAL ALPHA
+1D6A9 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD CAPITAL BETA
+1D6AA ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD CAPITAL GAMMA
+1D6AB ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD CAPITAL DELTA
+1D6AC ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD CAPITAL EPSILON
+1D6AD ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD CAPITAL ZETA
+1D6AE ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD CAPITAL ETA
+1D6AF ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD CAPITAL THETA
+1D6B0 ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD CAPITAL IOTA
+1D6B1 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD CAPITAL KAPPA
+1D6B2 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD CAPITAL LAMDA
+1D6B3 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD CAPITAL MU
+1D6B4 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD CAPITAL NU
+1D6B5 ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD CAPITAL XI
+1D6B6 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD CAPITAL OMICRON
+1D6B7 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD CAPITAL PI
+1D6B8 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD CAPITAL RHO
+1D6B9 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD CAPITAL THETA SYMBOL
+1D6BA ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD CAPITAL SIGMA
+1D6BB ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD CAPITAL TAU
+1D6BC ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD CAPITAL UPSILON
+1D6BD ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD CAPITAL PHI
+1D6BE ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD CAPITAL CHI
+1D6BF ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD CAPITAL PSI
+1D6C0 ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD CAPITAL OMEGA
+1D6C1 ; mapped ; 2207 # 3.1 MATHEMATICAL BOLD NABLA
+1D6C2 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD SMALL ALPHA
+1D6C3 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD SMALL BETA
+1D6C4 ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD SMALL GAMMA
+1D6C5 ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD SMALL DELTA
+1D6C6 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD SMALL EPSILON
+1D6C7 ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD SMALL ZETA
+1D6C8 ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD SMALL ETA
+1D6C9 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD SMALL THETA
+1D6CA ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD SMALL IOTA
+1D6CB ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD SMALL KAPPA
+1D6CC ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD SMALL LAMDA
+1D6CD ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD SMALL MU
+1D6CE ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD SMALL NU
+1D6CF ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD SMALL XI
+1D6D0 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD SMALL OMICRON
+1D6D1 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD SMALL PI
+1D6D2 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD SMALL RHO
+1D6D3..1D6D4 ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD SMALL FINAL SIGMA..MATHEMATICAL BOLD SMALL SIGMA
+1D6D5 ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD SMALL TAU
+1D6D6 ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD SMALL UPSILON
+1D6D7 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD SMALL PHI
+1D6D8 ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD SMALL CHI
+1D6D9 ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD SMALL PSI
+1D6DA ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD SMALL OMEGA
+1D6DB ; mapped ; 2202 # 3.1 MATHEMATICAL BOLD PARTIAL DIFFERENTIAL
+1D6DC ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD EPSILON SYMBOL
+1D6DD ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD THETA SYMBOL
+1D6DE ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD KAPPA SYMBOL
+1D6DF ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD PHI SYMBOL
+1D6E0 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD RHO SYMBOL
+1D6E1 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD PI SYMBOL
+1D6E2 ; mapped ; 03B1 # 3.1 MATHEMATICAL ITALIC CAPITAL ALPHA
+1D6E3 ; mapped ; 03B2 # 3.1 MATHEMATICAL ITALIC CAPITAL BETA
+1D6E4 ; mapped ; 03B3 # 3.1 MATHEMATICAL ITALIC CAPITAL GAMMA
+1D6E5 ; mapped ; 03B4 # 3.1 MATHEMATICAL ITALIC CAPITAL DELTA
+1D6E6 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC CAPITAL EPSILON
+1D6E7 ; mapped ; 03B6 # 3.1 MATHEMATICAL ITALIC CAPITAL ZETA
+1D6E8 ; mapped ; 03B7 # 3.1 MATHEMATICAL ITALIC CAPITAL ETA
+1D6E9 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC CAPITAL THETA
+1D6EA ; mapped ; 03B9 # 3.1 MATHEMATICAL ITALIC CAPITAL IOTA
+1D6EB ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC CAPITAL KAPPA
+1D6EC ; mapped ; 03BB # 3.1 MATHEMATICAL ITALIC CAPITAL LAMDA
+1D6ED ; mapped ; 03BC # 3.1 MATHEMATICAL ITALIC CAPITAL MU
+1D6EE ; mapped ; 03BD # 3.1 MATHEMATICAL ITALIC CAPITAL NU
+1D6EF ; mapped ; 03BE # 3.1 MATHEMATICAL ITALIC CAPITAL XI
+1D6F0 ; mapped ; 03BF # 3.1 MATHEMATICAL ITALIC CAPITAL OMICRON
+1D6F1 ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC CAPITAL PI
+1D6F2 ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC CAPITAL RHO
+1D6F3 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC CAPITAL THETA SYMBOL
+1D6F4 ; mapped ; 03C3 # 3.1 MATHEMATICAL ITALIC CAPITAL SIGMA
+1D6F5 ; mapped ; 03C4 # 3.1 MATHEMATICAL ITALIC CAPITAL TAU
+1D6F6 ; mapped ; 03C5 # 3.1 MATHEMATICAL ITALIC CAPITAL UPSILON
+1D6F7 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC CAPITAL PHI
+1D6F8 ; mapped ; 03C7 # 3.1 MATHEMATICAL ITALIC CAPITAL CHI
+1D6F9 ; mapped ; 03C8 # 3.1 MATHEMATICAL ITALIC CAPITAL PSI
+1D6FA ; mapped ; 03C9 # 3.1 MATHEMATICAL ITALIC CAPITAL OMEGA
+1D6FB ; mapped ; 2207 # 3.1 MATHEMATICAL ITALIC NABLA
+1D6FC ; mapped ; 03B1 # 3.1 MATHEMATICAL ITALIC SMALL ALPHA
+1D6FD ; mapped ; 03B2 # 3.1 MATHEMATICAL ITALIC SMALL BETA
+1D6FE ; mapped ; 03B3 # 3.1 MATHEMATICAL ITALIC SMALL GAMMA
+1D6FF ; mapped ; 03B4 # 3.1 MATHEMATICAL ITALIC SMALL DELTA
+1D700 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC SMALL EPSILON
+1D701 ; mapped ; 03B6 # 3.1 MATHEMATICAL ITALIC SMALL ZETA
+1D702 ; mapped ; 03B7 # 3.1 MATHEMATICAL ITALIC SMALL ETA
+1D703 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC SMALL THETA
+1D704 ; mapped ; 03B9 # 3.1 MATHEMATICAL ITALIC SMALL IOTA
+1D705 ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC SMALL KAPPA
+1D706 ; mapped ; 03BB # 3.1 MATHEMATICAL ITALIC SMALL LAMDA
+1D707 ; mapped ; 03BC # 3.1 MATHEMATICAL ITALIC SMALL MU
+1D708 ; mapped ; 03BD # 3.1 MATHEMATICAL ITALIC SMALL NU
+1D709 ; mapped ; 03BE # 3.1 MATHEMATICAL ITALIC SMALL XI
+1D70A ; mapped ; 03BF # 3.1 MATHEMATICAL ITALIC SMALL OMICRON
+1D70B ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC SMALL PI
+1D70C ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC SMALL RHO
+1D70D..1D70E ; mapped ; 03C3 # 3.1 MATHEMATICAL ITALIC SMALL FINAL SIGMA..MATHEMATICAL ITALIC SMALL SIGMA
+1D70F ; mapped ; 03C4 # 3.1 MATHEMATICAL ITALIC SMALL TAU
+1D710 ; mapped ; 03C5 # 3.1 MATHEMATICAL ITALIC SMALL UPSILON
+1D711 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC SMALL PHI
+1D712 ; mapped ; 03C7 # 3.1 MATHEMATICAL ITALIC SMALL CHI
+1D713 ; mapped ; 03C8 # 3.1 MATHEMATICAL ITALIC SMALL PSI
+1D714 ; mapped ; 03C9 # 3.1 MATHEMATICAL ITALIC SMALL OMEGA
+1D715 ; mapped ; 2202 # 3.1 MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL
+1D716 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC EPSILON SYMBOL
+1D717 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC THETA SYMBOL
+1D718 ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC KAPPA SYMBOL
+1D719 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC PHI SYMBOL
+1D71A ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC RHO SYMBOL
+1D71B ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC PI SYMBOL
+1D71C ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ALPHA
+1D71D ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL BETA
+1D71E ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL GAMMA
+1D71F ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL DELTA
+1D720 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL EPSILON
+1D721 ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ZETA
+1D722 ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ETA
+1D723 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL THETA
+1D724 ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL IOTA
+1D725 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL KAPPA
+1D726 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL LAMDA
+1D727 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL MU
+1D728 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL NU
+1D729 ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL XI
+1D72A ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL OMICRON
+1D72B ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PI
+1D72C ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL RHO
+1D72D ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL THETA SYMBOL
+1D72E ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL SIGMA
+1D72F ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL TAU
+1D730 ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL UPSILON
+1D731 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PHI
+1D732 ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL CHI
+1D733 ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PSI
+1D734 ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL OMEGA
+1D735 ; mapped ; 2207 # 3.1 MATHEMATICAL BOLD ITALIC NABLA
+1D736 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ALPHA
+1D737 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD ITALIC SMALL BETA
+1D738 ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD ITALIC SMALL GAMMA
+1D739 ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD ITALIC SMALL DELTA
+1D73A ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC SMALL EPSILON
+1D73B ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ZETA
+1D73C ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ETA
+1D73D ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC SMALL THETA
+1D73E ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD ITALIC SMALL IOTA
+1D73F ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC SMALL KAPPA
+1D740 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD ITALIC SMALL LAMDA
+1D741 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD ITALIC SMALL MU
+1D742 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD ITALIC SMALL NU
+1D743 ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD ITALIC SMALL XI
+1D744 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD ITALIC SMALL OMICRON
+1D745 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PI
+1D746 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC SMALL RHO
+1D747..1D748 ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD ITALIC SMALL FINAL SIGMA..MATHEMATICAL BOLD ITALIC SMALL SIGMA
+1D749 ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD ITALIC SMALL TAU
+1D74A ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD ITALIC SMALL UPSILON
+1D74B ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PHI
+1D74C ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD ITALIC SMALL CHI
+1D74D ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PSI
+1D74E ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD ITALIC SMALL OMEGA
+1D74F ; mapped ; 2202 # 3.1 MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL
+1D750 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC EPSILON SYMBOL
+1D751 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC THETA SYMBOL
+1D752 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC KAPPA SYMBOL
+1D753 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC PHI SYMBOL
+1D754 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC RHO SYMBOL
+1D755 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC PI SYMBOL
+1D756 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ALPHA
+1D757 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL BETA
+1D758 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL GAMMA
+1D759 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL DELTA
+1D75A ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL EPSILON
+1D75B ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ZETA
+1D75C ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ETA
+1D75D ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL THETA
+1D75E ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL IOTA
+1D75F ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL KAPPA
+1D760 ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL LAMDA
+1D761 ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL MU
+1D762 ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL NU
+1D763 ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL XI
+1D764 ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL OMICRON
+1D765 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PI
+1D766 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL RHO
+1D767 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL THETA SYMBOL
+1D768 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL SIGMA
+1D769 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL TAU
+1D76A ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL UPSILON
+1D76B ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PHI
+1D76C ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL CHI
+1D76D ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PSI
+1D76E ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA
+1D76F ; mapped ; 2207 # 3.1 MATHEMATICAL SANS-SERIF BOLD NABLA
+1D770 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA
+1D771 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL BETA
+1D772 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL GAMMA
+1D773 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL DELTA
+1D774 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL EPSILON
+1D775 ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ZETA
+1D776 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ETA
+1D777 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL THETA
+1D778 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL IOTA
+1D779 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL KAPPA
+1D77A ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL LAMDA
+1D77B ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL MU
+1D77C ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL NU
+1D77D ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL XI
+1D77E ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL OMICRON
+1D77F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PI
+1D780 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL RHO
+1D781..1D782 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL FINAL SIGMA..MATHEMATICAL SANS-SERIF BOLD SMALL SIGMA
+1D783 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL TAU
+1D784 ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL UPSILON
+1D785 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PHI
+1D786 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL CHI
+1D787 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PSI
+1D788 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA
+1D789 ; mapped ; 2202 # 3.1 MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL
+1D78A ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL
+1D78B ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD THETA SYMBOL
+1D78C ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD KAPPA SYMBOL
+1D78D ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD PHI SYMBOL
+1D78E ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD RHO SYMBOL
+1D78F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD PI SYMBOL
+1D790 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ALPHA
+1D791 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL BETA
+1D792 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL GAMMA
+1D793 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL DELTA
+1D794 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL EPSILON
+1D795 ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ZETA
+1D796 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ETA
+1D797 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL THETA
+1D798 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL IOTA
+1D799 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL KAPPA
+1D79A ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL LAMDA
+1D79B ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL MU
+1D79C ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL NU
+1D79D ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL XI
+1D79E ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMICRON
+1D79F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PI
+1D7A0 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL RHO
+1D7A1 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL THETA SYMBOL
+1D7A2 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL SIGMA
+1D7A3 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL TAU
+1D7A4 ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL UPSILON
+1D7A5 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PHI
+1D7A6 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL CHI
+1D7A7 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PSI
+1D7A8 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA
+1D7A9 ; mapped ; 2207 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA
+1D7AA ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA
+1D7AB ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL BETA
+1D7AC ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL GAMMA
+1D7AD ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL DELTA
+1D7AE ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL EPSILON
+1D7AF ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ZETA
+1D7B0 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ETA
+1D7B1 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL THETA
+1D7B2 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL IOTA
+1D7B3 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL KAPPA
+1D7B4 ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL LAMDA
+1D7B5 ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL MU
+1D7B6 ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL NU
+1D7B7 ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL XI
+1D7B8 ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMICRON
+1D7B9 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PI
+1D7BA ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL RHO
+1D7BB..1D7BC ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL FINAL SIGMA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL SIGMA
+1D7BD ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL TAU
+1D7BE ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL UPSILON
+1D7BF ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PHI
+1D7C0 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL CHI
+1D7C1 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PSI
+1D7C2 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA
+1D7C3 ; mapped ; 2202 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL
+1D7C4 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL
+1D7C5 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC THETA SYMBOL
+1D7C6 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC KAPPA SYMBOL
+1D7C7 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PHI SYMBOL
+1D7C8 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC RHO SYMBOL
+1D7C9 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PI SYMBOL
+1D7CA..1D7CB ; mapped ; 03DD # 5.0 MATHEMATICAL BOLD CAPITAL DIGAMMA..MATHEMATICAL BOLD SMALL DIGAMMA
+1D7CC..1D7CD ; disallowed # NA ..
+1D7CE ; mapped ; 0030 # 3.1 MATHEMATICAL BOLD DIGIT ZERO
+1D7CF ; mapped ; 0031 # 3.1 MATHEMATICAL BOLD DIGIT ONE
+1D7D0 ; mapped ; 0032 # 3.1 MATHEMATICAL BOLD DIGIT TWO
+1D7D1 ; mapped ; 0033 # 3.1 MATHEMATICAL BOLD DIGIT THREE
+1D7D2 ; mapped ; 0034 # 3.1 MATHEMATICAL BOLD DIGIT FOUR
+1D7D3 ; mapped ; 0035 # 3.1 MATHEMATICAL BOLD DIGIT FIVE
+1D7D4 ; mapped ; 0036 # 3.1 MATHEMATICAL BOLD DIGIT SIX
+1D7D5 ; mapped ; 0037 # 3.1 MATHEMATICAL BOLD DIGIT SEVEN
+1D7D6 ; mapped ; 0038 # 3.1 MATHEMATICAL BOLD DIGIT EIGHT
+1D7D7 ; mapped ; 0039 # 3.1 MATHEMATICAL BOLD DIGIT NINE
+1D7D8 ; mapped ; 0030 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT ZERO
+1D7D9 ; mapped ; 0031 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT ONE
+1D7DA ; mapped ; 0032 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT TWO
+1D7DB ; mapped ; 0033 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT THREE
+1D7DC ; mapped ; 0034 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT FOUR
+1D7DD ; mapped ; 0035 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT FIVE
+1D7DE ; mapped ; 0036 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT SIX
+1D7DF ; mapped ; 0037 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT SEVEN
+1D7E0 ; mapped ; 0038 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT EIGHT
+1D7E1 ; mapped ; 0039 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT NINE
+1D7E2 ; mapped ; 0030 # 3.1 MATHEMATICAL SANS-SERIF DIGIT ZERO
+1D7E3 ; mapped ; 0031 # 3.1 MATHEMATICAL SANS-SERIF DIGIT ONE
+1D7E4 ; mapped ; 0032 # 3.1 MATHEMATICAL SANS-SERIF DIGIT TWO
+1D7E5 ; mapped ; 0033 # 3.1 MATHEMATICAL SANS-SERIF DIGIT THREE
+1D7E6 ; mapped ; 0034 # 3.1 MATHEMATICAL SANS-SERIF DIGIT FOUR
+1D7E7 ; mapped ; 0035 # 3.1 MATHEMATICAL SANS-SERIF DIGIT FIVE
+1D7E8 ; mapped ; 0036 # 3.1 MATHEMATICAL SANS-SERIF DIGIT SIX
+1D7E9 ; mapped ; 0037 # 3.1 MATHEMATICAL SANS-SERIF DIGIT SEVEN
+1D7EA ; mapped ; 0038 # 3.1 MATHEMATICAL SANS-SERIF DIGIT EIGHT
+1D7EB ; mapped ; 0039 # 3.1 MATHEMATICAL SANS-SERIF DIGIT NINE
+1D7EC ; mapped ; 0030 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT ZERO
+1D7ED ; mapped ; 0031 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT ONE
+1D7EE ; mapped ; 0032 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT TWO
+1D7EF ; mapped ; 0033 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT THREE
+1D7F0 ; mapped ; 0034 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT FOUR
+1D7F1 ; mapped ; 0035 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT FIVE
+1D7F2 ; mapped ; 0036 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT SIX
+1D7F3 ; mapped ; 0037 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT SEVEN
+1D7F4 ; mapped ; 0038 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT EIGHT
+1D7F5 ; mapped ; 0039 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT NINE
+1D7F6 ; mapped ; 0030 # 3.1 MATHEMATICAL MONOSPACE DIGIT ZERO
+1D7F7 ; mapped ; 0031 # 3.1 MATHEMATICAL MONOSPACE DIGIT ONE
+1D7F8 ; mapped ; 0032 # 3.1 MATHEMATICAL MONOSPACE DIGIT TWO
+1D7F9 ; mapped ; 0033 # 3.1 MATHEMATICAL MONOSPACE DIGIT THREE
+1D7FA ; mapped ; 0034 # 3.1 MATHEMATICAL MONOSPACE DIGIT FOUR
+1D7FB ; mapped ; 0035 # 3.1 MATHEMATICAL MONOSPACE DIGIT FIVE
+1D7FC ; mapped ; 0036 # 3.1 MATHEMATICAL MONOSPACE DIGIT SIX
+1D7FD ; mapped ; 0037 # 3.1 MATHEMATICAL MONOSPACE DIGIT SEVEN
+1D7FE ; mapped ; 0038 # 3.1 MATHEMATICAL MONOSPACE DIGIT EIGHT
+1D7FF ; mapped ; 0039 # 3.1 MATHEMATICAL MONOSPACE DIGIT NINE
+1D800..1D9FF ; valid ; ; NV8 # 8.0 SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD
+1DA00..1DA36 ; valid # 8.0 SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+1DA37..1DA3A ; valid ; ; NV8 # 8.0 SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE
+1DA3B..1DA6C ; valid # 8.0 SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+1DA6D..1DA74 ; valid ; ; NV8 # 8.0 SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING
+1DA75 ; valid # 8.0 SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+1DA76..1DA83 ; valid ; ; NV8 # 8.0 SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH
+1DA84 ; valid # 8.0 SIGNWRITING LOCATION HEAD NECK
+1DA85..1DA8B ; valid ; ; NV8 # 8.0 SIGNWRITING LOCATION TORSO..SIGNWRITING PARENTHESIS
+1DA8C..1DA9A ; disallowed # NA ..
+1DA9B..1DA9F ; valid # 8.0 SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+1DAA0 ; disallowed # NA
+1DAA1..1DAAF ; valid # 8.0 SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+1DAB0..1DFFF ; disallowed # NA ..
+1E000..1E006 ; valid # 9.0 COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+1E007 ; disallowed # NA
+1E008..1E018 ; valid # 9.0 COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+1E019..1E01A ; disallowed # NA ..
+1E01B..1E021 ; valid # 9.0 COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+1E022 ; disallowed # NA
+1E023..1E024 ; valid # 9.0 COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+1E025 ; disallowed # NA
+1E026..1E02A ; valid # 9.0 COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+1E02B..1E7FF ; disallowed # NA ..
+1E800..1E8C4 ; valid # 7.0 MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON
+1E8C5..1E8C6 ; disallowed # NA ..
+1E8C7..1E8CF ; valid ; ; NV8 # 7.0 MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE
+1E8D0..1E8D6 ; valid # 7.0 MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+1E8D7..1E8FF ; disallowed # NA ..
+1E900 ; mapped ; 1E922 # 9.0 ADLAM CAPITAL LETTER ALIF
+1E901 ; mapped ; 1E923 # 9.0 ADLAM CAPITAL LETTER DAALI
+1E902 ; mapped ; 1E924 # 9.0 ADLAM CAPITAL LETTER LAAM
+1E903 ; mapped ; 1E925 # 9.0 ADLAM CAPITAL LETTER MIIM
+1E904 ; mapped ; 1E926 # 9.0 ADLAM CAPITAL LETTER BA
+1E905 ; mapped ; 1E927 # 9.0 ADLAM CAPITAL LETTER SINNYIIYHE
+1E906 ; mapped ; 1E928 # 9.0 ADLAM CAPITAL LETTER PE
+1E907 ; mapped ; 1E929 # 9.0 ADLAM CAPITAL LETTER BHE
+1E908 ; mapped ; 1E92A # 9.0 ADLAM CAPITAL LETTER RA
+1E909 ; mapped ; 1E92B # 9.0 ADLAM CAPITAL LETTER E
+1E90A ; mapped ; 1E92C # 9.0 ADLAM CAPITAL LETTER FA
+1E90B ; mapped ; 1E92D # 9.0 ADLAM CAPITAL LETTER I
+1E90C ; mapped ; 1E92E # 9.0 ADLAM CAPITAL LETTER O
+1E90D ; mapped ; 1E92F # 9.0 ADLAM CAPITAL LETTER DHA
+1E90E ; mapped ; 1E930 # 9.0 ADLAM CAPITAL LETTER YHE
+1E90F ; mapped ; 1E931 # 9.0 ADLAM CAPITAL LETTER WAW
+1E910 ; mapped ; 1E932 # 9.0 ADLAM CAPITAL LETTER NUN
+1E911 ; mapped ; 1E933 # 9.0 ADLAM CAPITAL LETTER KAF
+1E912 ; mapped ; 1E934 # 9.0 ADLAM CAPITAL LETTER YA
+1E913 ; mapped ; 1E935 # 9.0 ADLAM CAPITAL LETTER U
+1E914 ; mapped ; 1E936 # 9.0 ADLAM CAPITAL LETTER JIIM
+1E915 ; mapped ; 1E937 # 9.0 ADLAM CAPITAL LETTER CHI
+1E916 ; mapped ; 1E938 # 9.0 ADLAM CAPITAL LETTER HA
+1E917 ; mapped ; 1E939 # 9.0 ADLAM CAPITAL LETTER QAAF
+1E918 ; mapped ; 1E93A # 9.0 ADLAM CAPITAL LETTER GA
+1E919 ; mapped ; 1E93B # 9.0 ADLAM CAPITAL LETTER NYA
+1E91A ; mapped ; 1E93C # 9.0 ADLAM CAPITAL LETTER TU
+1E91B ; mapped ; 1E93D # 9.0 ADLAM CAPITAL LETTER NHA
+1E91C ; mapped ; 1E93E # 9.0 ADLAM CAPITAL LETTER VA
+1E91D ; mapped ; 1E93F # 9.0 ADLAM CAPITAL LETTER KHA
+1E91E ; mapped ; 1E940 # 9.0 ADLAM CAPITAL LETTER GBE
+1E91F ; mapped ; 1E941 # 9.0 ADLAM CAPITAL LETTER ZAL
+1E920 ; mapped ; 1E942 # 9.0 ADLAM CAPITAL LETTER KPO
+1E921 ; mapped ; 1E943 # 9.0 ADLAM CAPITAL LETTER SHA
+1E922..1E94A ; valid # 9.0 ADLAM SMALL LETTER ALIF..ADLAM NUKTA
+1E94B..1E94F ; disallowed # NA ..
+1E950..1E959 ; valid # 9.0 ADLAM DIGIT ZERO..ADLAM DIGIT NINE
+1E95A..1E95D ; disallowed # NA ..
+1E95E..1E95F ; valid ; ; NV8 # 9.0 ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK
+1E960..1EDFF ; disallowed # NA ..
+1EE00 ; mapped ; 0627 # 6.1 ARABIC MATHEMATICAL ALEF
+1EE01 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL BEH
+1EE02 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL JEEM
+1EE03 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL DAL
+1EE04 ; disallowed # NA
+1EE05 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL WAW
+1EE06 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL ZAIN
+1EE07 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL HAH
+1EE08 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL TAH
+1EE09 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL YEH
+1EE0A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL KAF
+1EE0B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL LAM
+1EE0C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL MEEM
+1EE0D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL NOON
+1EE0E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL SEEN
+1EE0F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL AIN
+1EE10 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL FEH
+1EE11 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL SAD
+1EE12 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL QAF
+1EE13 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL REH
+1EE14 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL SHEEN
+1EE15 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL TEH
+1EE16 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL THEH
+1EE17 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL KHAH
+1EE18 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL THAL
+1EE19 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL DAD
+1EE1A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL ZAH
+1EE1B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL GHAIN
+1EE1C ; mapped ; 066E # 6.1 ARABIC MATHEMATICAL DOTLESS BEH
+1EE1D ; mapped ; 06BA # 6.1 ARABIC MATHEMATICAL DOTLESS NOON
+1EE1E ; mapped ; 06A1 # 6.1 ARABIC MATHEMATICAL DOTLESS FEH
+1EE1F ; mapped ; 066F # 6.1 ARABIC MATHEMATICAL DOTLESS QAF
+1EE20 ; disallowed # NA
+1EE21 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL INITIAL BEH
+1EE22 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL INITIAL JEEM
+1EE23 ; disallowed # NA
+1EE24 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL INITIAL HEH
+1EE25..1EE26 ; disallowed # NA ..
+1EE27 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL INITIAL HAH
+1EE28 ; disallowed # NA
+1EE29 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL INITIAL YEH
+1EE2A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL INITIAL KAF
+1EE2B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL INITIAL LAM
+1EE2C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL INITIAL MEEM
+1EE2D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL INITIAL NOON
+1EE2E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL INITIAL SEEN
+1EE2F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL INITIAL AIN
+1EE30 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL INITIAL FEH
+1EE31 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL INITIAL SAD
+1EE32 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL INITIAL QAF
+1EE33 ; disallowed # NA
+1EE34 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL INITIAL SHEEN
+1EE35 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL INITIAL TEH
+1EE36 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL INITIAL THEH
+1EE37 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL INITIAL KHAH
+1EE38 ; disallowed # NA
+1EE39 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL INITIAL DAD
+1EE3A ; disallowed # NA
+1EE3B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL INITIAL GHAIN
+1EE3C..1EE41 ; disallowed # NA ..
+1EE42 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL TAILED JEEM
+1EE43..1EE46 ; disallowed # NA ..
+1EE47 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL TAILED HAH
+1EE48 ; disallowed # NA
+1EE49 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL TAILED YEH
+1EE4A ; disallowed # NA
+1EE4B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL TAILED LAM
+1EE4C ; disallowed # NA
+1EE4D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL TAILED NOON
+1EE4E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL TAILED SEEN
+1EE4F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL TAILED AIN
+1EE50 ; disallowed # NA
+1EE51 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL TAILED SAD
+1EE52 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL TAILED QAF
+1EE53 ; disallowed # NA
+1EE54 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL TAILED SHEEN
+1EE55..1EE56 ; disallowed # NA ..
+1EE57 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL TAILED KHAH
+1EE58 ; disallowed # NA
+1EE59 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL TAILED DAD
+1EE5A ; disallowed # NA
+1EE5B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL TAILED GHAIN
+1EE5C ; disallowed # NA
+1EE5D ; mapped ; 06BA # 6.1 ARABIC MATHEMATICAL TAILED DOTLESS NOON
+1EE5E ; disallowed # NA
+1EE5F ; mapped ; 066F # 6.1 ARABIC MATHEMATICAL TAILED DOTLESS QAF
+1EE60 ; disallowed # NA
+1EE61 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL STRETCHED BEH
+1EE62 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL STRETCHED JEEM
+1EE63 ; disallowed # NA
+1EE64 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL STRETCHED HEH
+1EE65..1EE66 ; disallowed # NA ..
+1EE67 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL STRETCHED HAH
+1EE68 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL STRETCHED TAH
+1EE69 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL STRETCHED YEH
+1EE6A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL STRETCHED KAF
+1EE6B ; disallowed # NA
+1EE6C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL STRETCHED MEEM
+1EE6D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL STRETCHED NOON
+1EE6E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL STRETCHED SEEN
+1EE6F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL STRETCHED AIN
+1EE70 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL STRETCHED FEH
+1EE71 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL STRETCHED SAD
+1EE72 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL STRETCHED QAF
+1EE73 ; disallowed # NA
+1EE74 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL STRETCHED SHEEN
+1EE75 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL STRETCHED TEH
+1EE76 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL STRETCHED THEH
+1EE77 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL STRETCHED KHAH
+1EE78 ; disallowed # NA
+1EE79 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL STRETCHED DAD
+1EE7A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL STRETCHED ZAH
+1EE7B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL STRETCHED GHAIN
+1EE7C ; mapped ; 066E # 6.1 ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
+1EE7D ; disallowed # NA
+1EE7E ; mapped ; 06A1 # 6.1 ARABIC MATHEMATICAL STRETCHED DOTLESS FEH
+1EE7F ; disallowed # NA
+1EE80 ; mapped ; 0627 # 6.1 ARABIC MATHEMATICAL LOOPED ALEF
+1EE81 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL LOOPED BEH
+1EE82 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL LOOPED JEEM
+1EE83 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL LOOPED DAL
+1EE84 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL LOOPED HEH
+1EE85 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL LOOPED WAW
+1EE86 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL LOOPED ZAIN
+1EE87 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL LOOPED HAH
+1EE88 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL LOOPED TAH
+1EE89 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL LOOPED YEH
+1EE8A ; disallowed # NA
+1EE8B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL LOOPED LAM
+1EE8C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL LOOPED MEEM
+1EE8D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL LOOPED NOON
+1EE8E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL LOOPED SEEN
+1EE8F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL LOOPED AIN
+1EE90 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL LOOPED FEH
+1EE91 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL LOOPED SAD
+1EE92 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL LOOPED QAF
+1EE93 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL LOOPED REH
+1EE94 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL LOOPED SHEEN
+1EE95 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL LOOPED TEH
+1EE96 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL LOOPED THEH
+1EE97 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL LOOPED KHAH
+1EE98 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL LOOPED THAL
+1EE99 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL LOOPED DAD
+1EE9A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL LOOPED ZAH
+1EE9B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL LOOPED GHAIN
+1EE9C..1EEA0 ; disallowed # NA ..
+1EEA1 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK BEH
+1EEA2 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK JEEM
+1EEA3 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK DAL
+1EEA4 ; disallowed # NA
+1EEA5 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK WAW
+1EEA6 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK ZAIN
+1EEA7 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK HAH
+1EEA8 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK TAH
+1EEA9 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK YEH
+1EEAA ; disallowed # NA
+1EEAB ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK LAM
+1EEAC ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK MEEM
+1EEAD ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK NOON
+1EEAE ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SEEN
+1EEAF ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK AIN
+1EEB0 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK FEH
+1EEB1 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SAD
+1EEB2 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK QAF
+1EEB3 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK REH
+1EEB4 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SHEEN
+1EEB5 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK TEH
+1EEB6 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK THEH
+1EEB7 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK KHAH
+1EEB8 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK THAL
+1EEB9 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK DAD
+1EEBA ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK ZAH
+1EEBB ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN
+1EEBC..1EEEF ; disallowed # NA ..
+1EEF0..1EEF1 ; valid ; ; NV8 # 6.1 ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
+1EEF2..1EFFF ; disallowed # NA ..
+1F000..1F02B ; valid ; ; NV8 # 5.1 MAHJONG TILE EAST WIND..MAHJONG TILE BACK
+1F02C..1F02F ; disallowed # NA ..
+1F030..1F093 ; valid ; ; NV8 # 5.1 DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06
+1F094..1F09F ; disallowed # NA ..
+1F0A0..1F0AE ; valid ; ; NV8 # 6.0 PLAYING CARD BACK..PLAYING CARD KING OF SPADES
+1F0AF..1F0B0 ; disallowed # NA ..
+1F0B1..1F0BE ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF HEARTS..PLAYING CARD KING OF HEARTS
+1F0BF ; valid ; ; NV8 # 7.0 PLAYING CARD RED JOKER
+1F0C0 ; disallowed # NA
+1F0C1..1F0CF ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF DIAMONDS..PLAYING CARD BLACK JOKER
+1F0D0 ; disallowed # NA
+1F0D1..1F0DF ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF CLUBS..PLAYING CARD WHITE JOKER
+1F0E0..1F0F5 ; valid ; ; NV8 # 7.0 PLAYING CARD FOOL..PLAYING CARD TRUMP-21
+1F0F6..1F0FF ; disallowed # NA ..
+1F100 ; disallowed # 5.2 DIGIT ZERO FULL STOP
+1F101 ; disallowed_STD3_mapped ; 0030 002C # 5.2 DIGIT ZERO COMMA
+1F102 ; disallowed_STD3_mapped ; 0031 002C # 5.2 DIGIT ONE COMMA
+1F103 ; disallowed_STD3_mapped ; 0032 002C # 5.2 DIGIT TWO COMMA
+1F104 ; disallowed_STD3_mapped ; 0033 002C # 5.2 DIGIT THREE COMMA
+1F105 ; disallowed_STD3_mapped ; 0034 002C # 5.2 DIGIT FOUR COMMA
+1F106 ; disallowed_STD3_mapped ; 0035 002C # 5.2 DIGIT FIVE COMMA
+1F107 ; disallowed_STD3_mapped ; 0036 002C # 5.2 DIGIT SIX COMMA
+1F108 ; disallowed_STD3_mapped ; 0037 002C # 5.2 DIGIT SEVEN COMMA
+1F109 ; disallowed_STD3_mapped ; 0038 002C # 5.2 DIGIT EIGHT COMMA
+1F10A ; disallowed_STD3_mapped ; 0039 002C # 5.2 DIGIT NINE COMMA
+1F10B..1F10C ; valid ; ; NV8 # 7.0 DINGBAT CIRCLED SANS-SERIF DIGIT ZERO..DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO
+1F10D..1F10F ; disallowed # NA ..
+1F110 ; disallowed_STD3_mapped ; 0028 0061 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER A
+1F111 ; disallowed_STD3_mapped ; 0028 0062 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER B
+1F112 ; disallowed_STD3_mapped ; 0028 0063 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER C
+1F113 ; disallowed_STD3_mapped ; 0028 0064 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER D
+1F114 ; disallowed_STD3_mapped ; 0028 0065 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER E
+1F115 ; disallowed_STD3_mapped ; 0028 0066 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER F
+1F116 ; disallowed_STD3_mapped ; 0028 0067 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER G
+1F117 ; disallowed_STD3_mapped ; 0028 0068 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER H
+1F118 ; disallowed_STD3_mapped ; 0028 0069 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER I
+1F119 ; disallowed_STD3_mapped ; 0028 006A 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER J
+1F11A ; disallowed_STD3_mapped ; 0028 006B 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER K
+1F11B ; disallowed_STD3_mapped ; 0028 006C 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER L
+1F11C ; disallowed_STD3_mapped ; 0028 006D 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER M
+1F11D ; disallowed_STD3_mapped ; 0028 006E 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER N
+1F11E ; disallowed_STD3_mapped ; 0028 006F 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER O
+1F11F ; disallowed_STD3_mapped ; 0028 0070 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER P
+1F120 ; disallowed_STD3_mapped ; 0028 0071 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Q
+1F121 ; disallowed_STD3_mapped ; 0028 0072 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER R
+1F122 ; disallowed_STD3_mapped ; 0028 0073 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER S
+1F123 ; disallowed_STD3_mapped ; 0028 0074 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER T
+1F124 ; disallowed_STD3_mapped ; 0028 0075 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER U
+1F125 ; disallowed_STD3_mapped ; 0028 0076 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER V
+1F126 ; disallowed_STD3_mapped ; 0028 0077 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER W
+1F127 ; disallowed_STD3_mapped ; 0028 0078 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER X
+1F128 ; disallowed_STD3_mapped ; 0028 0079 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Y
+1F129 ; disallowed_STD3_mapped ; 0028 007A 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Z
+1F12A ; mapped ; 3014 0073 3015 #5.2 TORTOISE SHELL BRACKETED LATIN CAPITAL LETTER S
+1F12B ; mapped ; 0063 # 5.2 CIRCLED ITALIC LATIN CAPITAL LETTER C
+1F12C ; mapped ; 0072 # 5.2 CIRCLED ITALIC LATIN CAPITAL LETTER R
+1F12D ; mapped ; 0063 0064 # 5.2 CIRCLED CD
+1F12E ; mapped ; 0077 007A # 5.2 CIRCLED WZ
+1F12F ; disallowed # NA
+1F130 ; mapped ; 0061 # 6.0 SQUARED LATIN CAPITAL LETTER A
+1F131 ; mapped ; 0062 # 5.2 SQUARED LATIN CAPITAL LETTER B
+1F132 ; mapped ; 0063 # 6.0 SQUARED LATIN CAPITAL LETTER C
+1F133 ; mapped ; 0064 # 6.0 SQUARED LATIN CAPITAL LETTER D
+1F134 ; mapped ; 0065 # 6.0 SQUARED LATIN CAPITAL LETTER E
+1F135 ; mapped ; 0066 # 6.0 SQUARED LATIN CAPITAL LETTER F
+1F136 ; mapped ; 0067 # 6.0 SQUARED LATIN CAPITAL LETTER G
+1F137 ; mapped ; 0068 # 6.0 SQUARED LATIN CAPITAL LETTER H
+1F138 ; mapped ; 0069 # 6.0 SQUARED LATIN CAPITAL LETTER I
+1F139 ; mapped ; 006A # 6.0 SQUARED LATIN CAPITAL LETTER J
+1F13A ; mapped ; 006B # 6.0 SQUARED LATIN CAPITAL LETTER K
+1F13B ; mapped ; 006C # 6.0 SQUARED LATIN CAPITAL LETTER L
+1F13C ; mapped ; 006D # 6.0 SQUARED LATIN CAPITAL LETTER M
+1F13D ; mapped ; 006E # 5.2 SQUARED LATIN CAPITAL LETTER N
+1F13E ; mapped ; 006F # 6.0 SQUARED LATIN CAPITAL LETTER O
+1F13F ; mapped ; 0070 # 5.2 SQUARED LATIN CAPITAL LETTER P
+1F140 ; mapped ; 0071 # 6.0 SQUARED LATIN CAPITAL LETTER Q
+1F141 ; mapped ; 0072 # 6.0 SQUARED LATIN CAPITAL LETTER R
+1F142 ; mapped ; 0073 # 5.2 SQUARED LATIN CAPITAL LETTER S
+1F143 ; mapped ; 0074 # 6.0 SQUARED LATIN CAPITAL LETTER T
+1F144 ; mapped ; 0075 # 6.0 SQUARED LATIN CAPITAL LETTER U
+1F145 ; mapped ; 0076 # 6.0 SQUARED LATIN CAPITAL LETTER V
+1F146 ; mapped ; 0077 # 5.2 SQUARED LATIN CAPITAL LETTER W
+1F147 ; mapped ; 0078 # 6.0 SQUARED LATIN CAPITAL LETTER X
+1F148 ; mapped ; 0079 # 6.0 SQUARED LATIN CAPITAL LETTER Y
+1F149 ; mapped ; 007A # 6.0 SQUARED LATIN CAPITAL LETTER Z
+1F14A ; mapped ; 0068 0076 # 5.2 SQUARED HV
+1F14B ; mapped ; 006D 0076 # 5.2 SQUARED MV
+1F14C ; mapped ; 0073 0064 # 5.2 SQUARED SD
+1F14D ; mapped ; 0073 0073 # 5.2 SQUARED SS
+1F14E ; mapped ; 0070 0070 0076 #5.2 SQUARED PPV
+1F14F ; mapped ; 0077 0063 # 6.0 SQUARED WC
+1F150..1F156 ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER G
+1F157 ; valid ; ; NV8 # 5.2 NEGATIVE CIRCLED LATIN CAPITAL LETTER H
+1F158..1F15E ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER I..NEGATIVE CIRCLED LATIN CAPITAL LETTER O
+1F15F ; valid ; ; NV8 # 5.2 NEGATIVE CIRCLED LATIN CAPITAL LETTER P
+1F160..1F169 ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER Q..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z
+1F16A ; mapped ; 006D 0063 # 6.1 RAISED MC SIGN
+1F16B ; mapped ; 006D 0064 # 6.1 RAISED MD SIGN
+1F16C..1F16F ; disallowed # NA ..
+1F170..1F178 ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER A..NEGATIVE SQUARED LATIN CAPITAL LETTER I
+1F179 ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER J
+1F17A ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER K
+1F17B..1F17C ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER L..NEGATIVE SQUARED LATIN CAPITAL LETTER M
+1F17D..1F17E ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER N..NEGATIVE SQUARED LATIN CAPITAL LETTER O
+1F17F ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER P
+1F180..1F189 ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER Q..NEGATIVE SQUARED LATIN CAPITAL LETTER Z
+1F18A..1F18D ; valid ; ; NV8 # 5.2 CROSSED NEGATIVE SQUARED LATIN CAPITAL LETTER P..NEGATIVE SQUARED SA
+1F18E..1F18F ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED AB..NEGATIVE SQUARED WC
+1F190 ; mapped ; 0064 006A # 5.2 SQUARE DJ
+1F191..1F19A ; valid ; ; NV8 # 6.0 SQUARED CL..SQUARED VS
+1F19B..1F1AC ; valid ; ; NV8 # 9.0 SQUARED THREE D..SQUARED VOD
+1F1AD..1F1E5 ; disallowed # NA ..
+1F1E6..1F1FF ; valid ; ; NV8 # 6.0 REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+1F200 ; mapped ; 307B 304B # 5.2 SQUARE HIRAGANA HOKA
+1F201 ; mapped ; 30B3 30B3 # 6.0 SQUARED KATAKANA KOKO
+1F202 ; mapped ; 30B5 # 6.0 SQUARED KATAKANA SA
+1F203..1F20F ; disallowed # NA ..
+1F210 ; mapped ; 624B # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-624B
+1F211 ; mapped ; 5B57 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5B57
+1F212 ; mapped ; 53CC # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-53CC
+1F213 ; mapped ; 30C7 # 5.2 SQUARED KATAKANA DE
+1F214 ; mapped ; 4E8C # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E8C
+1F215 ; mapped ; 591A # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-591A
+1F216 ; mapped ; 89E3 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-89E3
+1F217 ; mapped ; 5929 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5929
+1F218 ; mapped ; 4EA4 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4EA4
+1F219 ; mapped ; 6620 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6620
+1F21A ; mapped ; 7121 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-7121
+1F21B ; mapped ; 6599 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6599
+1F21C ; mapped ; 524D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-524D
+1F21D ; mapped ; 5F8C # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5F8C
+1F21E ; mapped ; 518D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-518D
+1F21F ; mapped ; 65B0 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-65B0
+1F220 ; mapped ; 521D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-521D
+1F221 ; mapped ; 7D42 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-7D42
+1F222 ; mapped ; 751F # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-751F
+1F223 ; mapped ; 8CA9 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-8CA9
+1F224 ; mapped ; 58F0 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-58F0
+1F225 ; mapped ; 5439 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5439
+1F226 ; mapped ; 6F14 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6F14
+1F227 ; mapped ; 6295 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6295
+1F228 ; mapped ; 6355 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6355
+1F229 ; mapped ; 4E00 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E00
+1F22A ; mapped ; 4E09 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E09
+1F22B ; mapped ; 904A # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-904A
+1F22C ; mapped ; 5DE6 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5DE6
+1F22D ; mapped ; 4E2D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E2D
+1F22E ; mapped ; 53F3 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-53F3
+1F22F ; mapped ; 6307 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6307
+1F230 ; mapped ; 8D70 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-8D70
+1F231 ; mapped ; 6253 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6253
+1F232 ; mapped ; 7981 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-7981
+1F233 ; mapped ; 7A7A # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-7A7A
+1F234 ; mapped ; 5408 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-5408
+1F235 ; mapped ; 6E80 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6E80
+1F236 ; mapped ; 6709 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6709
+1F237 ; mapped ; 6708 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6708
+1F238 ; mapped ; 7533 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-7533
+1F239 ; mapped ; 5272 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-5272
+1F23A ; mapped ; 55B6 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-55B6
+1F23B ; mapped ; 914D # 9.0 SQUARED CJK UNIFIED IDEOGRAPH-914D
+1F23C..1F23F ; disallowed # NA ..
+1F240 ; mapped ; 3014 672C 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C
+1F241 ; mapped ; 3014 4E09 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-4E09
+1F242 ; mapped ; 3014 4E8C 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-4E8C
+1F243 ; mapped ; 3014 5B89 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-5B89
+1F244 ; mapped ; 3014 70B9 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-70B9
+1F245 ; mapped ; 3014 6253 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6253
+1F246 ; mapped ; 3014 76D7 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-76D7
+1F247 ; mapped ; 3014 52DD 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-52DD
+1F248 ; mapped ; 3014 6557 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6557
+1F249..1F24F ; disallowed # NA ..
+1F250 ; mapped ; 5F97 # 6.0 CIRCLED IDEOGRAPH ADVANTAGE
+1F251 ; mapped ; 53EF # 6.0 CIRCLED IDEOGRAPH ACCEPT
+1F252..1F2FF ; disallowed # NA ..
+1F300..1F320 ; valid ; ; NV8 # 6.0 CYCLONE..SHOOTING STAR
+1F321..1F32C ; valid ; ; NV8 # 7.0 THERMOMETER..WIND BLOWING FACE
+1F32D..1F32F ; valid ; ; NV8 # 8.0 HOT DOG..BURRITO
+1F330..1F335 ; valid ; ; NV8 # 6.0 CHESTNUT..CACTUS
+1F336 ; valid ; ; NV8 # 7.0 HOT PEPPER
+1F337..1F37C ; valid ; ; NV8 # 6.0 TULIP..BABY BOTTLE
+1F37D ; valid ; ; NV8 # 7.0 FORK AND KNIFE WITH PLATE
+1F37E..1F37F ; valid ; ; NV8 # 8.0 BOTTLE WITH POPPING CORK..POPCORN
+1F380..1F393 ; valid ; ; NV8 # 6.0 RIBBON..GRADUATION CAP
+1F394..1F39F ; valid ; ; NV8 # 7.0 HEART WITH TIP ON THE LEFT..ADMISSION TICKETS
+1F3A0..1F3C4 ; valid ; ; NV8 # 6.0 CAROUSEL HORSE..SURFER
+1F3C5 ; valid ; ; NV8 # 7.0 SPORTS MEDAL
+1F3C6..1F3CA ; valid ; ; NV8 # 6.0 TROPHY..SWIMMER
+1F3CB..1F3CE ; valid ; ; NV8 # 7.0 WEIGHT LIFTER..RACING CAR
+1F3CF..1F3D3 ; valid ; ; NV8 # 8.0 CRICKET BAT AND BALL..TABLE TENNIS PADDLE AND BALL
+1F3D4..1F3DF ; valid ; ; NV8 # 7.0 SNOW CAPPED MOUNTAIN..STADIUM
+1F3E0..1F3F0 ; valid ; ; NV8 # 6.0 HOUSE BUILDING..EUROPEAN CASTLE
+1F3F1..1F3F7 ; valid ; ; NV8 # 7.0 WHITE PENNANT..LABEL
+1F3F8..1F3FF ; valid ; ; NV8 # 8.0 BADMINTON RACQUET AND SHUTTLECOCK..EMOJI MODIFIER FITZPATRICK TYPE-6
+1F400..1F43E ; valid ; ; NV8 # 6.0 RAT..PAW PRINTS
+1F43F ; valid ; ; NV8 # 7.0 CHIPMUNK
+1F440 ; valid ; ; NV8 # 6.0 EYES
+1F441 ; valid ; ; NV8 # 7.0 EYE
+1F442..1F4F7 ; valid ; ; NV8 # 6.0 EAR..CAMERA
+1F4F8 ; valid ; ; NV8 # 7.0 CAMERA WITH FLASH
+1F4F9..1F4FC ; valid ; ; NV8 # 6.0 VIDEO CAMERA..VIDEOCASSETTE
+1F4FD..1F4FE ; valid ; ; NV8 # 7.0 FILM PROJECTOR..PORTABLE STEREO
+1F4FF ; valid ; ; NV8 # 8.0 PRAYER BEADS
+1F500..1F53D ; valid ; ; NV8 # 6.0 TWISTED RIGHTWARDS ARROWS..DOWN-POINTING SMALL RED TRIANGLE
+1F53E..1F53F ; valid ; ; NV8 # 7.0 LOWER RIGHT SHADOWED WHITE CIRCLE..UPPER RIGHT SHADOWED WHITE CIRCLE
+1F540..1F543 ; valid ; ; NV8 # 6.1 CIRCLED CROSS POMMEE..NOTCHED LEFT SEMICIRCLE WITH THREE DOTS
+1F544..1F54A ; valid ; ; NV8 # 7.0 NOTCHED RIGHT SEMICIRCLE WITH THREE DOTS..DOVE OF PEACE +1F54B..1F54F ; valid ; ; NV8 # 8.0 KAABA..BOWL OF HYGIEIA +1F550..1F567 ; valid ; ; NV8 # 6.0 CLOCK FACE ONE OCLOCK..CLOCK FACE TWELVE-THIRTY +1F568..1F579 ; valid ; ; NV8 # 7.0 RIGHT SPEAKER..JOYSTICK +1F57A ; valid ; ; NV8 # 9.0 MAN DANCING +1F57B..1F5A3 ; valid ; ; NV8 # 7.0 LEFT HAND TELEPHONE RECEIVER..BLACK DOWN POINTING BACKHAND INDEX +1F5A4 ; valid ; ; NV8 # 9.0 BLACK HEART +1F5A5..1F5FA ; valid ; ; NV8 # 7.0 DESKTOP COMPUTER..WORLD MAP +1F5FB..1F5FF ; valid ; ; NV8 # 6.0 MOUNT FUJI..MOYAI +1F600 ; valid ; ; NV8 # 6.1 GRINNING FACE +1F601..1F610 ; valid ; ; NV8 # 6.0 GRINNING FACE WITH SMILING EYES..NEUTRAL FACE +1F611 ; valid ; ; NV8 # 6.1 EXPRESSIONLESS FACE +1F612..1F614 ; valid ; ; NV8 # 6.0 UNAMUSED FACE..PENSIVE FACE +1F615 ; valid ; ; NV8 # 6.1 CONFUSED FACE +1F616 ; valid ; ; NV8 # 6.0 CONFOUNDED FACE +1F617 ; valid ; ; NV8 # 6.1 KISSING FACE +1F618 ; valid ; ; NV8 # 6.0 FACE THROWING A KISS +1F619 ; valid ; ; NV8 # 6.1 KISSING FACE WITH SMILING EYES +1F61A ; valid ; ; NV8 # 6.0 KISSING FACE WITH CLOSED EYES +1F61B ; valid ; ; NV8 # 6.1 FACE WITH STUCK-OUT TONGUE +1F61C..1F61E ; valid ; ; NV8 # 6.0 FACE WITH STUCK-OUT TONGUE AND WINKING EYE..DISAPPOINTED FACE +1F61F ; valid ; ; NV8 # 6.1 WORRIED FACE +1F620..1F625 ; valid ; ; NV8 # 6.0 ANGRY FACE..DISAPPOINTED BUT RELIEVED FACE +1F626..1F627 ; valid ; ; NV8 # 6.1 FROWNING FACE WITH OPEN MOUTH..ANGUISHED FACE +1F628..1F62B ; valid ; ; NV8 # 6.0 FEARFUL FACE..TIRED FACE +1F62C ; valid ; ; NV8 # 6.1 GRIMACING FACE +1F62D ; valid ; ; NV8 # 6.0 LOUDLY CRYING FACE +1F62E..1F62F ; valid ; ; NV8 # 6.1 FACE WITH OPEN MOUTH..HUSHED FACE +1F630..1F633 ; valid ; ; NV8 # 6.0 FACE WITH OPEN MOUTH AND COLD SWEAT..FLUSHED FACE +1F634 ; valid ; ; NV8 # 6.1 SLEEPING FACE +1F635..1F640 ; valid ; ; NV8 # 6.0 DIZZY FACE..WEARY CAT FACE +1F641..1F642 ; valid ; ; NV8 # 7.0 SLIGHTLY FROWNING FACE..SLIGHTLY SMILING FACE +1F643..1F644 ; valid ; ; NV8 # 8.0 UPSIDE-DOWN FACE..FACE WITH ROLLING EYES +1F645..1F64F ; valid ; ; NV8 # 6.0 FACE WITH NO GOOD GESTURE..PERSON WITH FOLDED HANDS +1F650..1F67F ; valid ; ; NV8 # 7.0 NORTH WEST POINTING LEAF..REVERSE CHECKER BOARD +1F680..1F6C5 ; valid ; ; NV8 # 6.0 ROCKET..LEFT LUGGAGE +1F6C6..1F6CF ; valid ; ; NV8 # 7.0 TRIANGLE WITH ROUNDED CORNERS..BED +1F6D0 ; valid ; ; NV8 # 8.0 PLACE OF WORSHIP +1F6D1..1F6D2 ; valid ; ; NV8 # 9.0 OCTAGONAL SIGN..SHOPPING TROLLEY +1F6D3..1F6DF ; disallowed # NA .. +1F6E0..1F6EC ; valid ; ; NV8 # 7.0 HAMMER AND WRENCH..AIRPLANE ARRIVING +1F6ED..1F6EF ; disallowed # NA .. +1F6F0..1F6F3 ; valid ; ; NV8 # 7.0 SATELLITE..PASSENGER SHIP +1F6F4..1F6F6 ; valid ; ; NV8 # 9.0 SCOOTER..CANOE +1F6F7..1F6FF ; disallowed # NA .. +1F700..1F773 ; valid ; ; NV8 # 6.0 ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE +1F774..1F77F ; disallowed # NA .. +1F780..1F7D4 ; valid ; ; NV8 # 7.0 BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..HEAVY TWELVE POINTED PINWHEEL STAR +1F7D5..1F7FF ; disallowed # NA .. +1F800..1F80B ; valid ; ; NV8 # 7.0 LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD +1F80C..1F80F ; disallowed # NA .. +1F810..1F847 ; valid ; ; NV8 # 7.0 LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD..DOWNWARDS HEAVY ARROW +1F848..1F84F ; disallowed # NA .. +1F850..1F859 ; valid ; ; NV8 # 7.0 LEFTWARDS SANS-SERIF ARROW..UP DOWN SANS-SERIF ARROW +1F85A..1F85F ; disallowed # NA .. 
+1F860..1F887 ; valid ; ; NV8 # 7.0 WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW +1F888..1F88F ; disallowed # NA .. +1F890..1F8AD ; valid ; ; NV8 # 7.0 LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS +1F8AE..1F90F ; disallowed # NA .. +1F910..1F918 ; valid ; ; NV8 # 8.0 ZIPPER-MOUTH FACE..SIGN OF THE HORNS +1F919..1F91E ; valid ; ; NV8 # 9.0 CALL ME HAND..HAND WITH INDEX AND MIDDLE FINGERS CROSSED +1F91F ; disallowed # NA +1F920..1F927 ; valid ; ; NV8 # 9.0 FACE WITH COWBOY HAT..SNEEZING FACE +1F928..1F92F ; disallowed # NA .. +1F930 ; valid ; ; NV8 # 9.0 PREGNANT WOMAN +1F931..1F932 ; disallowed # NA .. +1F933..1F93E ; valid ; ; NV8 # 9.0 SELFIE..HANDBALL +1F93F ; disallowed # NA +1F940..1F94B ; valid ; ; NV8 # 9.0 WILTED FLOWER..MARTIAL ARTS UNIFORM +1F94C..1F94F ; disallowed # NA .. +1F950..1F95E ; valid ; ; NV8 # 9.0 CROISSANT..PANCAKES +1F95F..1F97F ; disallowed # NA .. +1F980..1F984 ; valid ; ; NV8 # 8.0 CRAB..UNICORN FACE +1F985..1F991 ; valid ; ; NV8 # 9.0 EAGLE..SQUID +1F992..1F9BF ; disallowed # NA .. +1F9C0 ; valid ; ; NV8 # 8.0 CHEESE WEDGE +1F9C1..1FFFD ; disallowed # NA .. +1FFFE..1FFFF ; disallowed # 2.0 .. +20000..2A6D6 ; valid # 3.1 CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6D6 +2A6D7..2A6FF ; disallowed # NA .. +2A700..2B734 ; valid # 5.2 CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B734 +2B735..2B73F ; disallowed # NA .. +2B740..2B81D ; valid # 6.0 CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D +2B81E..2B81F ; disallowed # NA .. +2B820..2CEA1 ; valid # 8.0 CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1 +2CEA2..2F7FF ; disallowed # NA .. +2F800 ; mapped ; 4E3D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F800 +2F801 ; mapped ; 4E38 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F801 +2F802 ; mapped ; 4E41 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F802 +2F803 ; mapped ; 20122 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F803 +2F804 ; mapped ; 4F60 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F804 +2F805 ; mapped ; 4FAE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F805 +2F806 ; mapped ; 4FBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F806 +2F807 ; mapped ; 5002 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F807 +2F808 ; mapped ; 507A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F808 +2F809 ; mapped ; 5099 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F809 +2F80A ; mapped ; 50E7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80A +2F80B ; mapped ; 50CF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80B +2F80C ; mapped ; 349E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80C +2F80D ; mapped ; 2063A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80D +2F80E ; mapped ; 514D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80E +2F80F ; mapped ; 5154 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80F +2F810 ; mapped ; 5164 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F810 +2F811 ; mapped ; 5177 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F811 +2F812 ; mapped ; 2051C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F812 +2F813 ; mapped ; 34B9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F813 +2F814 ; mapped ; 5167 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F814 +2F815 ; mapped ; 518D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F815 +2F816 ; mapped ; 2054B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F816 +2F817 ; mapped ; 5197 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F817 +2F818 ; mapped ; 51A4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F818 +2F819 ; mapped ; 4ECC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F819 +2F81A ; mapped ; 51AC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81A +2F81B ; mapped ; 51B5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81B +2F81C ; mapped ; 291DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81C +2F81D ; mapped ; 51F5 # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F81D +2F81E ; mapped ; 5203 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81E +2F81F ; mapped ; 34DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81F +2F820 ; mapped ; 523B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F820 +2F821 ; mapped ; 5246 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F821 +2F822 ; mapped ; 5272 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F822 +2F823 ; mapped ; 5277 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F823 +2F824 ; mapped ; 3515 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F824 +2F825 ; mapped ; 52C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F825 +2F826 ; mapped ; 52C9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F826 +2F827 ; mapped ; 52E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F827 +2F828 ; mapped ; 52FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F828 +2F829 ; mapped ; 5305 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F829 +2F82A ; mapped ; 5306 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82A +2F82B ; mapped ; 5317 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82B +2F82C ; mapped ; 5349 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82C +2F82D ; mapped ; 5351 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82D +2F82E ; mapped ; 535A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82E +2F82F ; mapped ; 5373 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82F +2F830 ; mapped ; 537D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F830 +2F831..2F833 ; mapped ; 537F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F831..CJK COMPATIBILITY IDEOGRAPH-2F833 +2F834 ; mapped ; 20A2C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F834 +2F835 ; mapped ; 7070 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F835 +2F836 ; mapped ; 53CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F836 +2F837 ; mapped ; 53DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F837 +2F838 ; mapped ; 20B63 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F838 +2F839 ; mapped ; 53EB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F839 +2F83A ; mapped ; 53F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83A +2F83B ; mapped ; 5406 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83B +2F83C ; mapped ; 549E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83C +2F83D ; mapped ; 5438 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83D +2F83E ; mapped ; 5448 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83E +2F83F ; mapped ; 5468 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83F +2F840 ; mapped ; 54A2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F840 +2F841 ; mapped ; 54F6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F841 +2F842 ; mapped ; 5510 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F842 +2F843 ; mapped ; 5553 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F843 +2F844 ; mapped ; 5563 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F844 +2F845..2F846 ; mapped ; 5584 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F845..CJK COMPATIBILITY IDEOGRAPH-2F846 +2F847 ; mapped ; 5599 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F847 +2F848 ; mapped ; 55AB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F848 +2F849 ; mapped ; 55B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F849 +2F84A ; mapped ; 55C2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84A +2F84B ; mapped ; 5716 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84B +2F84C ; mapped ; 5606 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84C +2F84D ; mapped ; 5717 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84D +2F84E ; mapped ; 5651 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84E +2F84F ; mapped ; 5674 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84F +2F850 ; mapped ; 5207 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F850 +2F851 ; mapped ; 58EE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F851 +2F852 ; mapped ; 57CE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F852 +2F853 ; mapped ; 57F4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F853 +2F854 ; mapped ; 580D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F854 +2F855 ; mapped ; 578B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F855 +2F856 ; mapped ; 5832 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F856 +2F857 ; mapped ; 5831 # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F857 +2F858 ; mapped ; 58AC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F858 +2F859 ; mapped ; 214E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F859 +2F85A ; mapped ; 58F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85A +2F85B ; mapped ; 58F7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85B +2F85C ; mapped ; 5906 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85C +2F85D ; mapped ; 591A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85D +2F85E ; mapped ; 5922 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85E +2F85F ; mapped ; 5962 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85F +2F860 ; mapped ; 216A8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F860 +2F861 ; mapped ; 216EA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F861 +2F862 ; mapped ; 59EC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F862 +2F863 ; mapped ; 5A1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F863 +2F864 ; mapped ; 5A27 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F864 +2F865 ; mapped ; 59D8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F865 +2F866 ; mapped ; 5A66 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F866 +2F867 ; mapped ; 36EE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F867 +2F868 ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F868 +2F869 ; mapped ; 5B08 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F869 +2F86A..2F86B ; mapped ; 5B3E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86A..CJK COMPATIBILITY IDEOGRAPH-2F86B +2F86C ; mapped ; 219C8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86C +2F86D ; mapped ; 5BC3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86D +2F86E ; mapped ; 5BD8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86E +2F86F ; mapped ; 5BE7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86F +2F870 ; mapped ; 5BF3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F870 +2F871 ; mapped ; 21B18 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F871 +2F872 ; mapped ; 5BFF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F872 +2F873 ; mapped ; 5C06 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F873 +2F874 ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F874 +2F875 ; mapped ; 5C22 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F875 +2F876 ; mapped ; 3781 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F876 +2F877 ; mapped ; 5C60 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F877 +2F878 ; mapped ; 5C6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F878 +2F879 ; mapped ; 5CC0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F879 +2F87A ; mapped ; 5C8D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87A +2F87B ; mapped ; 21DE4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87B +2F87C ; mapped ; 5D43 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87C +2F87D ; mapped ; 21DE6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87D +2F87E ; mapped ; 5D6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87E +2F87F ; mapped ; 5D6B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87F +2F880 ; mapped ; 5D7C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F880 +2F881 ; mapped ; 5DE1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F881 +2F882 ; mapped ; 5DE2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F882 +2F883 ; mapped ; 382F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F883 +2F884 ; mapped ; 5DFD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F884 +2F885 ; mapped ; 5E28 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F885 +2F886 ; mapped ; 5E3D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F886 +2F887 ; mapped ; 5E69 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F887 +2F888 ; mapped ; 3862 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F888 +2F889 ; mapped ; 22183 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F889 +2F88A ; mapped ; 387C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88A +2F88B ; mapped ; 5EB0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88B +2F88C ; mapped ; 5EB3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88C +2F88D ; mapped ; 5EB6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88D +2F88E ; mapped ; 5ECA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88E +2F88F ; mapped ; 2A392 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88F +2F890 ; 
mapped ; 5EFE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F890 +2F891..2F892 ; mapped ; 22331 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F891..CJK COMPATIBILITY IDEOGRAPH-2F892 +2F893 ; mapped ; 8201 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F893 +2F894..2F895 ; mapped ; 5F22 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F894..CJK COMPATIBILITY IDEOGRAPH-2F895 +2F896 ; mapped ; 38C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F896 +2F897 ; mapped ; 232B8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F897 +2F898 ; mapped ; 261DA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F898 +2F899 ; mapped ; 5F62 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F899 +2F89A ; mapped ; 5F6B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89A +2F89B ; mapped ; 38E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89B +2F89C ; mapped ; 5F9A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89C +2F89D ; mapped ; 5FCD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89D +2F89E ; mapped ; 5FD7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89E +2F89F ; mapped ; 5FF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89F +2F8A0 ; mapped ; 6081 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A0 +2F8A1 ; mapped ; 393A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A1 +2F8A2 ; mapped ; 391C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A2 +2F8A3 ; mapped ; 6094 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A3 +2F8A4 ; mapped ; 226D4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A4 +2F8A5 ; mapped ; 60C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A5 +2F8A6 ; mapped ; 6148 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A6 +2F8A7 ; mapped ; 614C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A7 +2F8A8 ; mapped ; 614E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A8 +2F8A9 ; mapped ; 614C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A9 +2F8AA ; mapped ; 617A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AA +2F8AB ; mapped ; 618E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AB +2F8AC ; mapped ; 61B2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AC +2F8AD ; mapped ; 61A4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AD +2F8AE ; mapped ; 61AF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AE +2F8AF ; mapped ; 61DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AF +2F8B0 ; mapped ; 61F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B0 +2F8B1 ; mapped ; 61F6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B1 +2F8B2 ; mapped ; 6210 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B2 +2F8B3 ; mapped ; 621B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B3 +2F8B4 ; mapped ; 625D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B4 +2F8B5 ; mapped ; 62B1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B5 +2F8B6 ; mapped ; 62D4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B6 +2F8B7 ; mapped ; 6350 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B7 +2F8B8 ; mapped ; 22B0C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B8 +2F8B9 ; mapped ; 633D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B9 +2F8BA ; mapped ; 62FC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BA +2F8BB ; mapped ; 6368 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BB +2F8BC ; mapped ; 6383 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BC +2F8BD ; mapped ; 63E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BD +2F8BE ; mapped ; 22BF1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BE +2F8BF ; mapped ; 6422 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BF +2F8C0 ; mapped ; 63C5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C0 +2F8C1 ; mapped ; 63A9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C1 +2F8C2 ; mapped ; 3A2E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C2 +2F8C3 ; mapped ; 6469 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C3 +2F8C4 ; mapped ; 647E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C4 +2F8C5 ; mapped ; 649D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C5 +2F8C6 ; mapped ; 6477 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C6 +2F8C7 ; mapped ; 3A6C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C7 +2F8C8 ; mapped ; 654F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C8 +2F8C9 ; 
mapped ; 656C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C9 +2F8CA ; mapped ; 2300A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CA +2F8CB ; mapped ; 65E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CB +2F8CC ; mapped ; 66F8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CC +2F8CD ; mapped ; 6649 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CD +2F8CE ; mapped ; 3B19 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CE +2F8CF ; mapped ; 6691 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CF +2F8D0 ; mapped ; 3B08 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D0 +2F8D1 ; mapped ; 3AE4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D1 +2F8D2 ; mapped ; 5192 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D2 +2F8D3 ; mapped ; 5195 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D3 +2F8D4 ; mapped ; 6700 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D4 +2F8D5 ; mapped ; 669C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D5 +2F8D6 ; mapped ; 80AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D6 +2F8D7 ; mapped ; 43D9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D7 +2F8D8 ; mapped ; 6717 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D8 +2F8D9 ; mapped ; 671B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D9 +2F8DA ; mapped ; 6721 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DA +2F8DB ; mapped ; 675E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DB +2F8DC ; mapped ; 6753 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DC +2F8DD ; mapped ; 233C3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DD +2F8DE ; mapped ; 3B49 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DE +2F8DF ; mapped ; 67FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DF +2F8E0 ; mapped ; 6785 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E0 +2F8E1 ; mapped ; 6852 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E1 +2F8E2 ; mapped ; 6885 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E2 +2F8E3 ; mapped ; 2346D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E3 +2F8E4 ; mapped ; 688E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E4 +2F8E5 ; mapped ; 681F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E5 +2F8E6 ; mapped ; 6914 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E6 +2F8E7 ; mapped ; 3B9D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E7 +2F8E8 ; mapped ; 6942 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E8 +2F8E9 ; mapped ; 69A3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E9 +2F8EA ; mapped ; 69EA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EA +2F8EB ; mapped ; 6AA8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EB +2F8EC ; mapped ; 236A3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EC +2F8ED ; mapped ; 6ADB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8ED +2F8EE ; mapped ; 3C18 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EE +2F8EF ; mapped ; 6B21 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EF +2F8F0 ; mapped ; 238A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F0 +2F8F1 ; mapped ; 6B54 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F1 +2F8F2 ; mapped ; 3C4E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F2 +2F8F3 ; mapped ; 6B72 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F3 +2F8F4 ; mapped ; 6B9F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F4 +2F8F5 ; mapped ; 6BBA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F5 +2F8F6 ; mapped ; 6BBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F6 +2F8F7 ; mapped ; 23A8D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F7 +2F8F8 ; mapped ; 21D0B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F8 +2F8F9 ; mapped ; 23AFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F9 +2F8FA ; mapped ; 6C4E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FA +2F8FB ; mapped ; 23CBC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FB +2F8FC ; mapped ; 6CBF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FC +2F8FD ; mapped ; 6CCD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FD +2F8FE ; mapped ; 6C67 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FE +2F8FF ; mapped ; 6D16 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FF +2F900 ; mapped ; 6D3E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F900 +2F901 ; mapped ; 6D77 # 
3.1 CJK COMPATIBILITY IDEOGRAPH-2F901 +2F902 ; mapped ; 6D41 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F902 +2F903 ; mapped ; 6D69 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F903 +2F904 ; mapped ; 6D78 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F904 +2F905 ; mapped ; 6D85 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F905 +2F906 ; mapped ; 23D1E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F906 +2F907 ; mapped ; 6D34 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F907 +2F908 ; mapped ; 6E2F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F908 +2F909 ; mapped ; 6E6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F909 +2F90A ; mapped ; 3D33 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90A +2F90B ; mapped ; 6ECB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90B +2F90C ; mapped ; 6EC7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90C +2F90D ; mapped ; 23ED1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90D +2F90E ; mapped ; 6DF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90E +2F90F ; mapped ; 6F6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90F +2F910 ; mapped ; 23F5E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F910 +2F911 ; mapped ; 23F8E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F911 +2F912 ; mapped ; 6FC6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F912 +2F913 ; mapped ; 7039 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F913 +2F914 ; mapped ; 701E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F914 +2F915 ; mapped ; 701B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F915 +2F916 ; mapped ; 3D96 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F916 +2F917 ; mapped ; 704A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F917 +2F918 ; mapped ; 707D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F918 +2F919 ; mapped ; 7077 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F919 +2F91A ; mapped ; 70AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91A +2F91B ; mapped ; 20525 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91B +2F91C ; mapped ; 7145 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91C +2F91D ; mapped ; 24263 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91D +2F91E ; mapped ; 719C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91E +2F91F ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91F +2F920 ; mapped ; 7228 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F920 +2F921 ; mapped ; 7235 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F921 +2F922 ; mapped ; 7250 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F922 +2F923 ; mapped ; 24608 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F923 +2F924 ; mapped ; 7280 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F924 +2F925 ; mapped ; 7295 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F925 +2F926 ; mapped ; 24735 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F926 +2F927 ; mapped ; 24814 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F927 +2F928 ; mapped ; 737A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F928 +2F929 ; mapped ; 738B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F929 +2F92A ; mapped ; 3EAC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92A +2F92B ; mapped ; 73A5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92B +2F92C..2F92D ; mapped ; 3EB8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92C..CJK COMPATIBILITY IDEOGRAPH-2F92D +2F92E ; mapped ; 7447 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92E +2F92F ; mapped ; 745C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92F +2F930 ; mapped ; 7471 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F930 +2F931 ; mapped ; 7485 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F931 +2F932 ; mapped ; 74CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F932 +2F933 ; mapped ; 3F1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F933 +2F934 ; mapped ; 7524 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F934 +2F935 ; mapped ; 24C36 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F935 +2F936 ; mapped ; 753E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F936 +2F937 ; mapped ; 24C92 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F937 +2F938 ; mapped ; 7570 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F938 +2F939 ; mapped ; 2219F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F939 
+2F93A ; mapped ; 7610 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93A +2F93B ; mapped ; 24FA1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93B +2F93C ; mapped ; 24FB8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93C +2F93D ; mapped ; 25044 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93D +2F93E ; mapped ; 3FFC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93E +2F93F ; mapped ; 4008 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93F +2F940 ; mapped ; 76F4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F940 +2F941 ; mapped ; 250F3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F941 +2F942 ; mapped ; 250F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F942 +2F943 ; mapped ; 25119 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F943 +2F944 ; mapped ; 25133 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F944 +2F945 ; mapped ; 771E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F945 +2F946..2F947 ; mapped ; 771F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F946..CJK COMPATIBILITY IDEOGRAPH-2F947 +2F948 ; mapped ; 774A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F948 +2F949 ; mapped ; 4039 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F949 +2F94A ; mapped ; 778B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94A +2F94B ; mapped ; 4046 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94B +2F94C ; mapped ; 4096 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94C +2F94D ; mapped ; 2541D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94D +2F94E ; mapped ; 784E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94E +2F94F ; mapped ; 788C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94F +2F950 ; mapped ; 78CC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F950 +2F951 ; mapped ; 40E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F951 +2F952 ; mapped ; 25626 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F952 +2F953 ; mapped ; 7956 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F953 +2F954 ; mapped ; 2569A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F954 +2F955 ; mapped ; 256C5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F955 +2F956 ; mapped ; 798F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F956 +2F957 ; mapped ; 79EB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F957 +2F958 ; mapped ; 412F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F958 +2F959 ; mapped ; 7A40 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F959 +2F95A ; mapped ; 7A4A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95A +2F95B ; mapped ; 7A4F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95B +2F95C ; mapped ; 2597C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95C +2F95D..2F95E ; mapped ; 25AA7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95D..CJK COMPATIBILITY IDEOGRAPH-2F95E +2F95F ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95F +2F960 ; mapped ; 4202 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F960 +2F961 ; mapped ; 25BAB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F961 +2F962 ; mapped ; 7BC6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F962 +2F963 ; mapped ; 7BC9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F963 +2F964 ; mapped ; 4227 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F964 +2F965 ; mapped ; 25C80 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F965 +2F966 ; mapped ; 7CD2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F966 +2F967 ; mapped ; 42A0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F967 +2F968 ; mapped ; 7CE8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F968 +2F969 ; mapped ; 7CE3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F969 +2F96A ; mapped ; 7D00 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96A +2F96B ; mapped ; 25F86 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96B +2F96C ; mapped ; 7D63 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96C +2F96D ; mapped ; 4301 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96D +2F96E ; mapped ; 7DC7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96E +2F96F ; mapped ; 7E02 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96F +2F970 ; mapped ; 7E45 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F970 +2F971 ; mapped ; 4334 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F971 +2F972 ; mapped ; 26228 # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F972 +2F973 ; mapped ; 26247 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F973 +2F974 ; mapped ; 4359 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F974 +2F975 ; mapped ; 262D9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F975 +2F976 ; mapped ; 7F7A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F976 +2F977 ; mapped ; 2633E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F977 +2F978 ; mapped ; 7F95 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F978 +2F979 ; mapped ; 7FFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F979 +2F97A ; mapped ; 8005 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97A +2F97B ; mapped ; 264DA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97B +2F97C ; mapped ; 26523 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97C +2F97D ; mapped ; 8060 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97D +2F97E ; mapped ; 265A8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97E +2F97F ; mapped ; 8070 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97F +2F980 ; mapped ; 2335F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F980 +2F981 ; mapped ; 43D5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F981 +2F982 ; mapped ; 80B2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F982 +2F983 ; mapped ; 8103 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F983 +2F984 ; mapped ; 440B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F984 +2F985 ; mapped ; 813E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F985 +2F986 ; mapped ; 5AB5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F986 +2F987 ; mapped ; 267A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F987 +2F988 ; mapped ; 267B5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F988 +2F989 ; mapped ; 23393 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F989 +2F98A ; mapped ; 2339C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98A +2F98B ; mapped ; 8201 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98B +2F98C ; mapped ; 8204 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98C +2F98D ; mapped ; 8F9E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98D +2F98E ; mapped ; 446B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98E +2F98F ; mapped ; 8291 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98F +2F990 ; mapped ; 828B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F990 +2F991 ; mapped ; 829D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F991 +2F992 ; mapped ; 52B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F992 +2F993 ; mapped ; 82B1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F993 +2F994 ; mapped ; 82B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F994 +2F995 ; mapped ; 82BD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F995 +2F996 ; mapped ; 82E6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F996 +2F997 ; mapped ; 26B3C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F997 +2F998 ; mapped ; 82E5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F998 +2F999 ; mapped ; 831D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F999 +2F99A ; mapped ; 8363 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99A +2F99B ; mapped ; 83AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99B +2F99C ; mapped ; 8323 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99C +2F99D ; mapped ; 83BD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99D +2F99E ; mapped ; 83E7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99E +2F99F ; mapped ; 8457 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99F +2F9A0 ; mapped ; 8353 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A0 +2F9A1 ; mapped ; 83CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A1 +2F9A2 ; mapped ; 83CC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A2 +2F9A3 ; mapped ; 83DC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A3 +2F9A4 ; mapped ; 26C36 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A4 +2F9A5 ; mapped ; 26D6B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A5 +2F9A6 ; mapped ; 26CD5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A6 +2F9A7 ; mapped ; 452B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A7 +2F9A8 ; mapped ; 84F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A8 +2F9A9 ; mapped ; 84F3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A9 +2F9AA ; mapped ; 8516 # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F9AA +2F9AB ; mapped ; 273CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AB +2F9AC ; mapped ; 8564 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AC +2F9AD ; mapped ; 26F2C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AD +2F9AE ; mapped ; 455D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AE +2F9AF ; mapped ; 4561 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AF +2F9B0 ; mapped ; 26FB1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B0 +2F9B1 ; mapped ; 270D2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B1 +2F9B2 ; mapped ; 456B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B2 +2F9B3 ; mapped ; 8650 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B3 +2F9B4 ; mapped ; 865C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B4 +2F9B5 ; mapped ; 8667 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B5 +2F9B6 ; mapped ; 8669 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B6 +2F9B7 ; mapped ; 86A9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B7 +2F9B8 ; mapped ; 8688 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B8 +2F9B9 ; mapped ; 870E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B9 +2F9BA ; mapped ; 86E2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BA +2F9BB ; mapped ; 8779 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BB +2F9BC ; mapped ; 8728 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BC +2F9BD ; mapped ; 876B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BD +2F9BE ; mapped ; 8786 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BE +2F9BF ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BF +2F9C0 ; mapped ; 87E1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C0 +2F9C1 ; mapped ; 8801 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C1 +2F9C2 ; mapped ; 45F9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C2 +2F9C3 ; mapped ; 8860 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C3 +2F9C4 ; mapped ; 8863 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C4 +2F9C5 ; mapped ; 27667 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C5 +2F9C6 ; mapped ; 88D7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C6 +2F9C7 ; mapped ; 88DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C7 +2F9C8 ; mapped ; 4635 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C8 +2F9C9 ; mapped ; 88FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C9 +2F9CA ; mapped ; 34BB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CA +2F9CB ; mapped ; 278AE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CB +2F9CC ; mapped ; 27966 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CC +2F9CD ; mapped ; 46BE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CD +2F9CE ; mapped ; 46C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CE +2F9CF ; mapped ; 8AA0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CF +2F9D0 ; mapped ; 8AED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D0 +2F9D1 ; mapped ; 8B8A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D1 +2F9D2 ; mapped ; 8C55 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D2 +2F9D3 ; mapped ; 27CA8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D3 +2F9D4 ; mapped ; 8CAB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D4 +2F9D5 ; mapped ; 8CC1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D5 +2F9D6 ; mapped ; 8D1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D6 +2F9D7 ; mapped ; 8D77 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D7 +2F9D8 ; mapped ; 27F2F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D8 +2F9D9 ; mapped ; 20804 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D9 +2F9DA ; mapped ; 8DCB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DA +2F9DB ; mapped ; 8DBC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DB +2F9DC ; mapped ; 8DF0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DC +2F9DD ; mapped ; 208DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DD +2F9DE ; mapped ; 8ED4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DE +2F9DF ; mapped ; 8F38 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DF +2F9E0 ; mapped ; 285D2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E0 +2F9E1 ; mapped ; 285ED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E1 +2F9E2 ; mapped ; 9094 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E2 
+2F9E3 ; mapped ; 90F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E3 +2F9E4 ; mapped ; 9111 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E4 +2F9E5 ; mapped ; 2872E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E5 +2F9E6 ; mapped ; 911B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E6 +2F9E7 ; mapped ; 9238 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E7 +2F9E8 ; mapped ; 92D7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E8 +2F9E9 ; mapped ; 92D8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E9 +2F9EA ; mapped ; 927C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EA +2F9EB ; mapped ; 93F9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EB +2F9EC ; mapped ; 9415 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EC +2F9ED ; mapped ; 28BFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9ED +2F9EE ; mapped ; 958B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EE +2F9EF ; mapped ; 4995 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EF +2F9F0 ; mapped ; 95B7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F0 +2F9F1 ; mapped ; 28D77 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F1 +2F9F2 ; mapped ; 49E6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F2 +2F9F3 ; mapped ; 96C3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F3 +2F9F4 ; mapped ; 5DB2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F4 +2F9F5 ; mapped ; 9723 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F5 +2F9F6 ; mapped ; 29145 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F6 +2F9F7 ; mapped ; 2921A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F7 +2F9F8 ; mapped ; 4A6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F8 +2F9F9 ; mapped ; 4A76 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F9 +2F9FA ; mapped ; 97E0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FA +2F9FB ; mapped ; 2940A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FB +2F9FC ; mapped ; 4AB2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FC +2F9FD ; mapped ; 29496 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FD +2F9FE..2F9FF ; mapped ; 980B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FE..CJK COMPATIBILITY IDEOGRAPH-2F9FF +2FA00 ; mapped ; 9829 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA00 +2FA01 ; mapped ; 295B6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA01 +2FA02 ; mapped ; 98E2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA02 +2FA03 ; mapped ; 4B33 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA03 +2FA04 ; mapped ; 9929 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA04 +2FA05 ; mapped ; 99A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA05 +2FA06 ; mapped ; 99C2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA06 +2FA07 ; mapped ; 99FE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA07 +2FA08 ; mapped ; 4BCE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA08 +2FA09 ; mapped ; 29B30 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA09 +2FA0A ; mapped ; 9B12 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0A +2FA0B ; mapped ; 9C40 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0B +2FA0C ; mapped ; 9CFD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0C +2FA0D ; mapped ; 4CCE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0D +2FA0E ; mapped ; 4CED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0E +2FA0F ; mapped ; 9D67 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0F +2FA10 ; mapped ; 2A0CE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA10 +2FA11 ; mapped ; 4CF8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA11 +2FA12 ; mapped ; 2A105 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA12 +2FA13 ; mapped ; 2A20E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA13 +2FA14 ; mapped ; 2A291 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA14 +2FA15 ; mapped ; 9EBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA15 +2FA16 ; mapped ; 4D56 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA16 +2FA17 ; mapped ; 9EF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA17 +2FA18 ; mapped ; 9EFE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA18 +2FA19 ; mapped ; 9F05 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA19 +2FA1A ; mapped ; 9F0F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1A +2FA1B ; mapped ; 9F16 # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2FA1B
+2FA1C ; mapped ; 9F3B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1C
+2FA1D ; mapped ; 2A600 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1D
+2FA1E..2FFFD ; disallowed # NA ..
+2FFFE..2FFFF ; disallowed # 2.0 ..
+30000..3FFFD ; disallowed # NA ..
+3FFFE..3FFFF ; disallowed # 2.0 ..
+40000..4FFFD ; disallowed # NA ..
+4FFFE..4FFFF ; disallowed # 2.0 ..
+50000..5FFFD ; disallowed # NA ..
+5FFFE..5FFFF ; disallowed # 2.0 ..
+60000..6FFFD ; disallowed # NA ..
+6FFFE..6FFFF ; disallowed # 2.0 ..
+70000..7FFFD ; disallowed # NA ..
+7FFFE..7FFFF ; disallowed # 2.0 ..
+80000..8FFFD ; disallowed # NA ..
+8FFFE..8FFFF ; disallowed # 2.0 ..
+90000..9FFFD ; disallowed # NA ..
+9FFFE..9FFFF ; disallowed # 2.0 ..
+A0000..AFFFD ; disallowed # NA ..
+AFFFE..AFFFF ; disallowed # 2.0 ..
+B0000..BFFFD ; disallowed # NA ..
+BFFFE..BFFFF ; disallowed # 2.0 ..
+C0000..CFFFD ; disallowed # NA ..
+CFFFE..CFFFF ; disallowed # 2.0 ..
+D0000..DFFFD ; disallowed # NA ..
+DFFFE..DFFFF ; disallowed # 2.0 ..
+E0000 ; disallowed # NA
+E0001 ; disallowed # 3.1 LANGUAGE TAG
+E0002..E001F ; disallowed # NA ..
+E0020..E007F ; disallowed # 3.1 TAG SPACE..CANCEL TAG
+E0080..E00FF ; disallowed # NA ..
+E0100..E01EF ; ignored # 4.0 VARIATION SELECTOR-17..VARIATION SELECTOR-256
+E01F0..EFFFD ; disallowed # NA ..
+EFFFE..EFFFF ; disallowed # 2.0 ..
+F0000..FFFFD ; disallowed # 2.0 ..
+FFFFE..FFFFF ; disallowed # 2.0 ..
+100000..10FFFD; disallowed # 2.0 ..
+10FFFE..10FFFF; disallowed # 2.0 ..
+
+# Total code points: 1114112
+
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/lib.rs b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/lib.rs
new file mode 100644
index 000000000..bdeafe448
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/lib.rs
@@ -0,0 +1,73 @@
+// Copyright 2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This Rust crate implements IDNA
+//! [per the WHATWG URL Standard](https://url.spec.whatwg.org/#idna).
+//!
+//! It also exposes the underlying algorithms from [*Unicode IDNA Compatibility Processing*
+//! (Unicode Technical Standard #46)](http://www.unicode.org/reports/tr46/)
+//! and [Punycode (RFC 3492)](https://tools.ietf.org/html/rfc3492).
+//!
+//! Quoting from [UTS #46’s introduction](http://www.unicode.org/reports/tr46/#Introduction):
+//!
+//! > Initially, domain names were restricted to ASCII characters.
+//! > A system was introduced in 2003 for internationalized domain names (IDN).
+//! > This system is called Internationalizing Domain Names for Applications,
+//! > or IDNA2003 for short.
+//! > This mechanism supports IDNs by means of a client software transformation
+//! > into a format known as Punycode.
+//! > A revision of IDNA was approved in 2010 (IDNA2008).
+//! > This revision has a number of incompatibilities with IDNA2003.
+//! >
+//! > The incompatibilities force implementers of client software,
+//! > such as browsers and emailers,
+//! > to face difficult choices during the transition period
+//! > as registries shift from IDNA2003 to IDNA2008.
+//! > This document specifies a mechanism
+//! > that minimizes the impact of this transition for client software,
+//! > allowing client software to access domains that are valid under either system.
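For orientation, the two functions defined just below are the vendored crate's entire public entry surface. A minimal usage sketch follows; it is not part of the vendored sources, and it assumes the crate builds under its usual `idna` name and uses the classic `bücher`/`xn--bcher-kva` Punycode example:

```rust
// Sketch only: exercising the two public entry points declared below.
// The crate name `idna` and the sample domain are illustrative assumptions.
extern crate idna;

fn main() {
    // Unicode labels are mapped per UTS #46 and Punycode-encoded with the
    // "xn--" ACE prefix.
    assert_eq!(idna::domain_to_ascii("bücher.example").ok(),
               Some("xn--bcher-kva.example".to_string()));

    // The reverse direction always yields a mapped string, paired with a
    // Result carrying any syntax violations encountered along the way.
    let (unicode, result) = idna::domain_to_unicode("xn--bcher-kva.example");
    assert_eq!(unicode, "bücher.example");
    assert!(result.is_ok());
}
```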
+
+#[macro_use] extern crate matches;
+extern crate unicode_bidi;
+extern crate unicode_normalization;
+
+pub mod punycode;
+pub mod uts46;
+
+/// The [domain to ASCII](https://url.spec.whatwg.org/#concept-domain-to-ascii) algorithm.
+///
+/// Return the ASCII representation of a domain name,
+/// normalizing characters (upper-case to lower-case and other kinds of equivalence)
+/// and using Punycode as necessary.
+///
+/// This process may fail.
+pub fn domain_to_ascii(domain: &str) -> Result<String, uts46::Errors> {
+    uts46::to_ascii(domain, uts46::Flags {
+        use_std3_ascii_rules: false,
+        transitional_processing: true,  // XXX: switch when Firefox does
+        verify_dns_length: false,
+    })
+}
+
+/// The [domain to Unicode](https://url.spec.whatwg.org/#concept-domain-to-unicode) algorithm.
+///
+/// Return the Unicode representation of a domain name,
+/// normalizing characters (upper-case to lower-case and other kinds of equivalence)
+/// and decoding Punycode as necessary.
+///
+/// This may indicate [syntax violations](https://url.spec.whatwg.org/#syntax-violation)
+/// but always returns a string for the mapped domain.
+pub fn domain_to_unicode(domain: &str) -> (String, Result<(), uts46::Errors>) {
+    uts46::to_unicode(domain, uts46::Flags {
+        use_std3_ascii_rules: false,
+
+        // Unused:
+        transitional_processing: true,
+        verify_dns_length: false,
+    })
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/make_uts46_mapping_table.py b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/make_uts46_mapping_table.py
new file mode 100644
index 000000000..3e5d0962f
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/make_uts46_mapping_table.py
@@ -0,0 +1,139 @@
+# Copyright 2013-2014 The rust-url developers.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+# Run as: python make_uts46_mapping_table.py IdnaMappingTable.txt > uts46_mapping_table.rs
+# You can get the latest idna table from
+# http://www.unicode.org/Public/idna/latest/IdnaMappingTable.txt
+
+import collections
+import itertools
+
+print('''\
+// Copyright 2013-2014 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Generated by make_idna_table.py
+
+static TABLE: &'static [Range] = &[
+''')
+
+txt = open("IdnaMappingTable.txt")
+
+def escape_char(c):
+    return "\\u{%x}" % ord(c[0])
+
+def char(s):
+    return unichr(int(s, 16))
+
+strtab = collections.OrderedDict()
+strtab_offset = 0
+
+def strtab_slice(s):
+    global strtab, strtab_offset
+
+    if s in strtab:
+        return strtab[s]
+    else:
+        utf8_len = len(s.encode('utf8'))
+        c = (strtab_offset, utf8_len)
+        strtab[s] = c
+        strtab_offset += utf8_len
+        return c
+
+def rust_slice(s):
+    start = s[0]
+    length = s[1]
+    start_lo = start & 0xff
+    start_hi = start >> 8
+    assert length <= 255
+    assert start_hi <= 255
+    return "(StringTableSlice { byte_start_lo: %d, byte_start_hi: %d, byte_len: %d })" % (start_lo, start_hi, length)
+
+ranges = []
+
+for line in txt:
+    # remove comments
+    line, _, _ = line.partition('#')
+    # skip empty lines
+    if len(line.strip()) == 0:
+        continue
+    fields = line.split(';')
+    if fields[0].strip() == 'D800..DFFF':
+        continue  # Surrogates don't occur in Rust strings.
+    first, _, last = fields[0].strip().partition('..')
+    if not last:
+        last = first
+    mapping = fields[1].strip().replace('_', ' ').title().replace(' ', '')
+    unicode_str = None
+    if len(fields) > 2:
+        if fields[2].strip():
+            unicode_str = u''.join(char(c) for c in fields[2].strip().split(' '))
+        elif mapping == "Deviation":
+            unicode_str = u''
+    ranges.append((first, last, mapping, unicode_str))
+
+def mergeable_key(r):
+    mapping = r[2]
+    # These types have associated data, so we should not merge them.
+    if mapping in ('Mapped', 'Deviation', 'DisallowedStd3Mapped'):
+        return r
+    assert mapping in ('Valid', 'Ignored', 'Disallowed', 'DisallowedStd3Valid')
+    return mapping
+
+grouped_ranges = itertools.groupby(ranges, key=mergeable_key)
+
+optimized_ranges = []
+
+for (k, g) in grouped_ranges:
+    group = list(g)
+    if len(group) == 1:
+        optimized_ranges.append(group[0])
+        continue
+    # Assert that nothing in the group has an associated unicode string.
+    for g in group:
+        if len(g[3]) > 2:
+            assert not g[3][2].strip()
+    # Assert that consecutive members of the group don't leave gaps in
+    # the codepoint space.
+    a, b = itertools.tee(group)
+    next(b, None)
+    for (g1, g2) in itertools.izip(a, b):
+        last_char = int(g1[1], 16)
+        next_char = int(g2[0], 16)
+        if last_char + 1 == next_char:
+            continue
+        # There's a gap where surrogates would appear, but we don't have to
+        # worry about that gap, as surrogates never appear in Rust strings.
+        # Assert we're seeing the surrogate case here.
+        assert last_char == 0xd7ff
+        assert next_char == 0xe000
+    first = group[0][0]
+    last = group[-1][1]
+    mapping = group[0][2]
+    unicode_str = group[0][3]
+    optimized_ranges.append((first, last, mapping, unicode_str))
+
+for (first, last, mapping, unicode_str) in optimized_ranges:
+    if unicode_str is not None:
+        mapping += rust_slice(strtab_slice(unicode_str))
+    print("    Range { from: '%s', to: '%s', mapping: %s }," % (escape_char(char(first)),
+                                                                escape_char(char(last)),
+                                                                mapping))
+
+print("];\n")
+
+def escape_str(s):
+    return [escape_char(c) for c in s]
+
+print("static STRING_TABLE: &'static str = \"%s\";"
+      % '\\\n '.join(itertools.chain(*[escape_str(s) for s in strtab.iterkeys()])))
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/punycode.rs b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/punycode.rs
new file mode 100644
index 000000000..75bb1d6e8
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/punycode.rs
@@ -0,0 +1,212 @@
+// Copyright 2013 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Punycode ([RFC 3492](http://tools.ietf.org/html/rfc3492)) implementation.
+//!
+//! Since Punycode fundamentally works on unicode code points,
+//! `encode` and `decode` take and return slices and vectors of `char`.
+//! `encode_str` and `decode_to_string` provide convenience wrappers
+//! that convert from and to Rust’s UTF-8 based `str` and `String` types.
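The wrappers named in the doc comment above make the round trip easy to see. A short sketch, not part of the vendored sources, assuming the module is reachable as `idna::punycode` and using the standard `bücher` example:

```rust
// Sketch only: round-tripping a label through the convenience wrappers
// declared in this module. Bare Punycode carries no "xn--" ACE prefix;
// that prefix is added by the uts46 layer, not here.
extern crate idna;

use idna::punycode::{decode_to_string, encode_str};

fn main() {
    assert_eq!(encode_str("bücher"), Some("bcher-kva".to_string()));
    assert_eq!(decode_to_string("bcher-kva"), Some("bücher".to_string()));
}
```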
+
+use std::u32;
+use std::char;
+use std::ascii::AsciiExt;
+
+// Bootstring parameters for Punycode
+static BASE: u32 = 36;
+static T_MIN: u32 = 1;
+static T_MAX: u32 = 26;
+static SKEW: u32 = 38;
+static DAMP: u32 = 700;
+static INITIAL_BIAS: u32 = 72;
+static INITIAL_N: u32 = 0x80;
+static DELIMITER: char = '-';
+
+
+#[inline]
+fn adapt(mut delta: u32, num_points: u32, first_time: bool) -> u32 {
+    delta /= if first_time { DAMP } else { 2 };
+    delta += delta / num_points;
+    let mut k = 0;
+    while delta > ((BASE - T_MIN) * T_MAX) / 2 {
+        delta /= BASE - T_MIN;
+        k += BASE;
+    }
+    k + (((BASE - T_MIN + 1) * delta) / (delta + SKEW))
+}
+
+
+/// Convert Punycode to a Unicode `String`.
+///
+/// This is a convenience wrapper around `decode`.
+#[inline]
+pub fn decode_to_string(input: &str) -> Option<String> {
+    decode(input).map(|chars| chars.into_iter().collect())
+}
+
+
+/// Convert Punycode to Unicode.
+///
+/// Return None on malformed input or overflow.
+/// Overflow can only happen on inputs that take more than
+/// 63 encoded bytes, the DNS limit on domain name labels.
+pub fn decode(input: &str) -> Option<Vec<char>> {
+    // Handle "basic" (ASCII) code points.
+    // They are encoded as-is before the last delimiter, if any.
+    let (mut output, input) = match input.rfind(DELIMITER) {
+        None => (Vec::new(), input),
+        Some(position) => (
+            input[..position].chars().collect(),
+            if position > 0 { &input[position + 1..] } else { input }
+        )
+    };
+    let mut code_point = INITIAL_N;
+    let mut bias = INITIAL_BIAS;
+    let mut i = 0;
+    let mut iter = input.bytes();
+    loop {
+        let previous_i = i;
+        let mut weight = 1;
+        let mut k = BASE;
+        let mut byte = match iter.next() {
+            None => break,
+            Some(byte) => byte,
+        };
+        // Decode a generalized variable-length integer into delta,
+        // which gets added to i.
+        loop {
+            let digit = match byte {
+                byte @ b'0' ... b'9' => byte - b'0' + 26,
+                byte @ b'A' ... b'Z' => byte - b'A',
+                byte @ b'a' ... b'z' => byte - b'a',
+                _ => return None
+            } as u32;
+            if digit > (u32::MAX - i) / weight {
+                return None // Overflow
+            }
+            i += digit * weight;
+            let t = if k <= bias { T_MIN }
+                    else if k >= bias + T_MAX { T_MAX }
+                    else { k - bias };
+            if digit < t {
+                break
+            }
+            if weight > u32::MAX / (BASE - t) {
+                return None // Overflow
+            }
+            weight *= BASE - t;
+            k += BASE;
+            byte = match iter.next() {
+                None => return None, // End of input before the end of this delta
+                Some(byte) => byte,
+            };
+        }
+        let length = output.len() as u32;
+        bias = adapt(i - previous_i, length + 1, previous_i == 0);
+        if i / (length + 1) > u32::MAX - code_point {
+            return None // Overflow
+        }
+        // i was supposed to wrap around from length+1 to 0,
+        // incrementing code_point each time.
+        code_point += i / (length + 1);
+        i %= length + 1;
+        let c = match char::from_u32(code_point) {
+            Some(c) => c,
+            None => return None
+        };
+        output.insert(i as usize, c);
+        i += 1;
+    }
+    Some(output)
+}
+
+
+/// Convert a Unicode `str` to Punycode.
+///
+/// This is a convenience wrapper around `encode`.
+#[inline]
+pub fn encode_str(input: &str) -> Option<String> {
+    encode(&input.chars().collect::<Vec<char>>())
+}
+
+
+/// Convert Unicode to Punycode.
+///
+/// Return None on overflow, which can only happen on inputs that would take more than
+/// 63 encoded bytes, the DNS limit on domain name labels.
+pub fn encode(input: &[char]) -> Option<String> {
+    // Handle "basic" (ASCII) code points. They are encoded as-is.
+    let output_bytes = input.iter().filter_map(|&c|
+        if c.is_ascii() { Some(c as u8) } else { None }
+    ).collect();
+    let mut output = unsafe { String::from_utf8_unchecked(output_bytes) };
+    let basic_length = output.len() as u32;
+    if basic_length > 0 {
+        output.push_str("-")
+    }
+    let mut code_point = INITIAL_N;
+    let mut delta = 0;
+    let mut bias = INITIAL_BIAS;
+    let mut processed = basic_length;
+    let input_length = input.len() as u32;
+    while processed < input_length {
+        // All code points < code_point have been handled already.
+        // Find the next larger one.
+        let min_code_point = input.iter().map(|&c| c as u32)
+                                  .filter(|&c| c >= code_point).min().unwrap();
+        if min_code_point - code_point > (u32::MAX - delta) / (processed + 1) {
+            return None // Overflow
+        }
+        // Increase delta to advance the decoder's <n,i> state to <m,0>.
+        delta += (min_code_point - code_point) * (processed + 1);
+        code_point = min_code_point;
+        for &c in input {
+            let c = c as u32;
+            if c < code_point {
+                delta += 1;
+                if delta == 0 {
+                    return None // Overflow
+                }
+            }
+            if c == code_point {
+                // Represent delta as a generalized variable-length integer:
+                let mut q = delta;
+                let mut k = BASE;
+                loop {
+                    let t = if k <= bias { T_MIN }
+                            else if k >= bias + T_MAX { T_MAX }
+                            else { k - bias };
+                    if q < t {
+                        break
+                    }
+                    let value = t + ((q - t) % (BASE - t));
+                    output.push(value_to_digit(value));
+                    q = (q - t) / (BASE - t);
+                    k += BASE;
+                }
+                output.push(value_to_digit(q));
+                bias = adapt(delta, processed + 1, processed == basic_length);
+                delta = 0;
+                processed += 1;
+            }
+        }
+        delta += 1;
+        code_point += 1;
+    }
+    Some(output)
+}
+
+
+#[inline]
+fn value_to_digit(value: u32) -> char {
+    match value {
+        0 ... 25 => (value as u8 + 'a' as u8) as char,       // a..z
+        26 ... 35 => (value as u8 - 26 + '0' as u8) as char, // 0..9
+        _ => panic!()
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/uts46.rs b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/uts46.rs
new file mode 100644
index 000000000..7115bd8ff
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/uts46.rs
@@ -0,0 +1,415 @@
+// Copyright 2013-2014 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! [*Unicode IDNA Compatibility Processing*
+//! (Unicode Technical Standard #46)](http://www.unicode.org/reports/tr46/)
+
+use self::Mapping::*;
+use punycode;
+use std::ascii::AsciiExt;
+use std::cmp::Ordering::{Equal, Less, Greater};
+use unicode_bidi::{BidiClass, bidi_class};
+use unicode_normalization::UnicodeNormalization;
+use unicode_normalization::char::is_combining_mark;
+
+include!("uts46_mapping_table.rs");
+
+
+pub static PUNYCODE_PREFIX: &'static str = "xn--";
+
+
+#[derive(Debug)]
+struct StringTableSlice {
+    // Store these as separate fields so the structure will have an
+    // alignment of 1 and thus pack better into the Mapping enum, below.
+    byte_start_lo: u8,
+    byte_start_hi: u8,
+    byte_len: u8,
+}
+
+fn decode_slice(slice: &StringTableSlice) -> &'static str {
+    let lo = slice.byte_start_lo as usize;
+    let hi = slice.byte_start_hi as usize;
+    let start = (hi << 8) | lo;
+    let len = slice.byte_len as usize;
+    &STRING_TABLE[start..(start + len)]
+}
+
+#[repr(u8)]
+#[derive(Debug)]
+enum Mapping {
+    Valid,
+    Ignored,
+    Mapped(StringTableSlice),
+    Deviation(StringTableSlice),
+    Disallowed,
+    DisallowedStd3Valid,
+    DisallowedStd3Mapped(StringTableSlice),
+}
+
+struct Range {
+    from: char,
+    to: char,
+    mapping: Mapping,
+}
+
+fn find_char(codepoint: char) -> &'static Mapping {
+    let r = TABLE.binary_search_by(|ref range| {
+        if codepoint > range.to {
+            Less
+        } else if codepoint < range.from {
+            Greater
+        } else {
+            Equal
+        }
+    });
+    r.ok().map(|i| &TABLE[i].mapping).unwrap()
+}
+
+fn map_char(codepoint: char, flags: Flags, output: &mut String, errors: &mut Vec<Error>) {
+    match *find_char(codepoint) {
+        Mapping::Valid => output.push(codepoint),
+        Mapping::Ignored => {},
+        Mapping::Mapped(ref slice) => output.push_str(decode_slice(slice)),
+        Mapping::Deviation(ref slice) => {
+            if flags.transitional_processing {
+                output.push_str(decode_slice(slice))
+            } else {
+                output.push(codepoint)
+            }
+        }
+        Mapping::Disallowed => {
+            errors.push(Error::DissallowedCharacter);
+            output.push(codepoint);
+        }
+        Mapping::DisallowedStd3Valid => {
+            if flags.use_std3_ascii_rules {
+                errors.push(Error::DissallowedByStd3AsciiRules);
+            }
+            output.push(codepoint)
+        }
+        Mapping::DisallowedStd3Mapped(ref slice) => {
+            if flags.use_std3_ascii_rules {
+                errors.push(Error::DissallowedMappedInStd3);
+            }
+            output.push_str(decode_slice(slice))
+        }
+    }
+}
+
+// http://tools.ietf.org/html/rfc5893#section-2
+fn passes_bidi(label: &str, is_bidi_domain: bool) -> bool {
+    // Rule 0: Bidi Rules apply to Bidi Domain Names: a name with at least one RTL label.  A label
+    // is RTL if it contains at least one character of bidi class R, AL or AN.
+    if !is_bidi_domain {
+        return true;
+    }
+
+    let mut chars = label.chars();
+    let first_char_class = match chars.next() {
+        Some(c) => bidi_class(c),
+        None => return true, // empty string
+    };
+
+    match first_char_class {
+        // LTR label
+        BidiClass::L => {
+            // Rule 5
+            loop {
+                match chars.next() {
+                    Some(c) => {
+                        if !matches!(bidi_class(c),
+                                     BidiClass::L | BidiClass::EN |
+                                     BidiClass::ES | BidiClass::CS |
+                                     BidiClass::ET | BidiClass::ON |
+                                     BidiClass::BN | BidiClass::NSM
+                        ) {
+                            return false;
+                        }
+                    },
+                    None => { break; },
+                }
+            }
+
+            // Rule 6
+            // must end in L or EN followed by 0 or more NSM
+            let mut rev_chars = label.chars().rev();
+            let mut last_non_nsm = rev_chars.next();
+            loop {
+                match last_non_nsm {
+                    Some(c) if bidi_class(c) == BidiClass::NSM => {
+                        last_non_nsm = rev_chars.next();
+                        continue;
+                    }
+                    _ => { break; },
+                }
+            }
+            match last_non_nsm {
+                Some(c) if bidi_class(c) == BidiClass::L
+                        || bidi_class(c) == BidiClass::EN => {},
+                Some(_) => { return false; },
+                _ => {}
+            }
+
+        }
+
+        // RTL label
+        BidiClass::R | BidiClass::AL => {
+            let mut found_en = false;
+            let mut found_an = false;
+
+            // Rule 2
+            loop {
+                match chars.next() {
+                    Some(c) => {
+                        let char_class = bidi_class(c);
+
+                        if char_class == BidiClass::EN {
+                            found_en = true;
+                        }
+                        if char_class == BidiClass::AN {
+                            found_an = true;
+                        }
+
+                        if !matches!(char_class, BidiClass::R | BidiClass::AL |
+                                     BidiClass::AN | BidiClass::EN |
+                                     BidiClass::ES | BidiClass::CS |
+                                     BidiClass::ET | BidiClass::ON |
+                                     BidiClass::BN | BidiClass::NSM) {
+                            return false;
+                        }
+                    },
+                    None => { break; },
+                }
+            }
+            // Rule 3
+            let mut rev_chars = label.chars().rev();
+            let mut last = rev_chars.next();
+            loop { // must end in R, AL, EN or AN followed by 0 or more NSM
+                match last {
+                    Some(c) if bidi_class(c) == BidiClass::NSM => {
+                        last = rev_chars.next();
+                        continue;
+                    }
+                    _ => { break; },
+                }
+            }
+            match last {
+                Some(c) if matches!(bidi_class(c), BidiClass::R | BidiClass::AL |
+                                    BidiClass::EN | BidiClass::AN) => {},
+                _ => { return false; }
+            }
+
+            // Rule 4
+            if found_an && found_en {
+                return false;
+            }
+        }
+
+        // Rule 1: Should start with L or R/AL
+        _ => {
+            return false;
+        }
+    }
+
+    return true;
+}
+
+/// http://www.unicode.org/reports/tr46/#Validity_Criteria
+fn validate(label: &str, is_bidi_domain: bool, flags: Flags, errors: &mut Vec<Error>) {
+    let first_char = label.chars().next();
+    if first_char == None {
+        // Empty string, pass
+    }
+
+    // V1: Must be in NFC form.
+    else if label.nfc().ne(label.chars()) {
+        errors.push(Error::ValidityCriteria);
+    }
+
+    // V2: No U+002D HYPHEN-MINUS in both third and fourth positions.
+    //
+    // NOTE: Spec says that the label must not contain a HYPHEN-MINUS character in both the
+    // third and fourth positions. But nobody follows this criteria. See the spec issue below:
+    // https://github.com/whatwg/url/issues/53
+    //
+    // TODO: Add *CheckHyphens* flag.
+
+    // V3: neither begin nor end with a U+002D HYPHEN-MINUS
+    else if label.starts_with("-") || label.ends_with("-") {
+        errors.push(Error::ValidityCriteria);
+    }
+
+    // V4: not contain a U+002E FULL STOP
+    //
+    // Here, label can't contain '.' since the input is from .split('.')
+
+    // V5: not begin with a GC=Mark
+    else if is_combining_mark(first_char.unwrap()) {
+        errors.push(Error::ValidityCriteria);
+    }
+
+    // V6: Check against Mapping Table
+    else if label.chars().any(|c| match *find_char(c) {
+        Mapping::Valid => false,
+        Mapping::Deviation(_) => flags.transitional_processing,
+        Mapping::DisallowedStd3Valid => flags.use_std3_ascii_rules,
+        _ => true,
+    }) {
+        errors.push(Error::ValidityCriteria);
+    }
+
+    // V7: ContextJ rules
+    //
+    // TODO: Implement rules and add *CheckJoiners* flag.
+
+    // V8: Bidi rules
+    //
+    // TODO: Add *CheckBidi* flag
+    else if !passes_bidi(label, is_bidi_domain)
+    {
+        errors.push(Error::ValidityCriteria);
+    }
+}
+
+/// http://www.unicode.org/reports/tr46/#Processing
+fn processing(domain: &str, flags: Flags, errors: &mut Vec<Error>) -> String {
+    let mut mapped = String::new();
+    for c in domain.chars() {
+        map_char(c, flags, &mut mapped, errors)
+    }
+    let normalized: String = mapped.nfc().collect();
+
+    // Find out if it's a Bidi Domain Name
+    //
+    // First, check for literal bidi chars
+    let mut is_bidi_domain = domain.chars().any(|c|
+        matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
+    );
+    if !is_bidi_domain {
+        // Then check for punycode-encoded bidi chars
+        for label in normalized.split('.') {
+            if label.starts_with(PUNYCODE_PREFIX) {
+                match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
+                    Some(decoded_label) => {
+                        if decoded_label.chars().any(|c|
+                            matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
+                        ) {
+                            is_bidi_domain = true;
+                        }
+                    }
+                    None => {
+                        is_bidi_domain = true;
+                    }
+                }
+            }
+        }
+    }
+
+    let mut validated = String::new();
+    let mut first = true;
+    for label in normalized.split('.') {
+        if !first {
+            validated.push('.');
+        }
+        first = false;
+        if label.starts_with(PUNYCODE_PREFIX) {
+            match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
+                Some(decoded_label) => {
+                    let flags = Flags { transitional_processing: false, ..flags };
+                    validate(&decoded_label, is_bidi_domain, flags, errors);
+                    validated.push_str(&decoded_label)
+                }
+                None => errors.push(Error::PunycodeError)
+            }
+        } else {
+            validate(label, is_bidi_domain, flags, errors);
+            validated.push_str(label)
+        }
+    }
+    validated
+}
+
+#[derive(Copy, Clone)]
+pub struct Flags {
+    pub use_std3_ascii_rules: bool,
+    pub transitional_processing: bool,
+    pub verify_dns_length: bool,
+}
+
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+enum Error {
+    PunycodeError,
+    ValidityCriteria,
+    DissallowedByStd3AsciiRules,
+    DissallowedMappedInStd3,
+    DissallowedCharacter,
+    TooLongForDns,
+    TooShortForDns,
+}
+
+/// Errors recorded during UTS #46 processing.
+///
+/// This is opaque for now, only indicating the presence of at least one error.
+/// More details may be exposed in the future.
+#[derive(Debug)]
+pub struct Errors(Vec<Error>);
+
+/// http://www.unicode.org/reports/tr46/#ToASCII
+pub fn to_ascii(domain: &str, flags: Flags) -> Result<String, Errors> {
+    let mut errors = Vec::new();
+    let mut result = String::new();
+    let mut first = true;
+    for label in processing(domain, flags, &mut errors).split('.') {
+        if !first {
+            result.push('.');
+        }
+        first = false;
+        if label.is_ascii() {
+            result.push_str(label);
+        } else {
+            match punycode::encode_str(label) {
+                Some(x) => {
+                    result.push_str(PUNYCODE_PREFIX);
+                    result.push_str(&x);
+                },
+                None => errors.push(Error::PunycodeError)
+            }
+        }
+    }
+
+    if flags.verify_dns_length {
+        let domain = if result.ends_with(".") { &result[..result.len()-1] } else { &*result };
+        if domain.len() < 1 || domain.split('.').any(|label| label.len() < 1) {
+            errors.push(Error::TooShortForDns)
+        }
+        if domain.len() > 253 || domain.split('.').any(|label| label.len() > 63) {
+            errors.push(Error::TooLongForDns)
+        }
+    }
+    if errors.is_empty() {
+        Ok(result)
+    } else {
+        Err(Errors(errors))
+    }
+}
+
+/// http://www.unicode.org/reports/tr46/#ToUnicode
+///
+/// Only `use_std3_ascii_rules` is used in `flags`.
+pub fn to_unicode(domain: &str, mut flags: Flags) -> (String, Result<(), Errors>) {
+    flags.transitional_processing = false;
+    let mut errors = Vec::new();
+    let domain = processing(domain, flags, &mut errors);
+    let errors = if errors.is_empty() {
+        Ok(())
+    } else {
+        Err(Errors(errors))
+    };
+    (domain, errors)
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/uts46_mapping_table.rs b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/uts46_mapping_table.rs
new file mode 100644
index 000000000..a0f68a69f
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/src/uts46_mapping_table.rs
@@ -0,0 +1,12822 @@
+// Copyright 2013-2014 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
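+
+// Layout note for this vendored copy (see `decode_slice` in uts46.rs): each
+// `StringTableSlice` packs a 16-bit offset into STRING_TABLE as two u8
+// fields, so `StringTableSlice { byte_start_lo: 38, byte_start_hi: 0,
+// byte_len: 2 }` names the bytes STRING_TABLE[(0 << 8) | 38 .. 38 + 2].
+// The ranges below are sorted and non-overlapping over the whole codepoint
+// space, which is what lets `find_char` binary-search TABLE.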
+ +// Generated by make_idna_table.py + +static TABLE: &'static [Range] = &[ + + Range { from: '\u{0}', to: '\u{2c}', mapping: DisallowedStd3Valid }, + Range { from: '\u{2d}', to: '\u{2e}', mapping: Valid }, + Range { from: '\u{2f}', to: '\u{2f}', mapping: DisallowedStd3Valid }, + Range { from: '\u{30}', to: '\u{39}', mapping: Valid }, + Range { from: '\u{3a}', to: '\u{40}', mapping: DisallowedStd3Valid }, + Range { from: '\u{41}', to: '\u{41}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{42}', to: '\u{42}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{43}', to: '\u{43}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{44}', to: '\u{44}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{45}', to: '\u{45}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{46}', to: '\u{46}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{47}', to: '\u{47}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{48}', to: '\u{48}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{49}', to: '\u{49}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{4a}', to: '\u{4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{4b}', to: '\u{4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{4c}', to: '\u{4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{4d}', to: '\u{4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{4e}', to: '\u{4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{4f}', to: '\u{4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{50}', to: '\u{50}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{51}', to: '\u{51}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{52}', to: '\u{52}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{53}', to: '\u{53}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{54}', to: '\u{54}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{55}', to: '\u{55}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{56}', to: '\u{56}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{57}', to: '\u{57}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{58}', to: '\u{58}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{59}', to: 
'\u{59}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{5a}', to: '\u{5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{5b}', to: '\u{60}', mapping: DisallowedStd3Valid }, + Range { from: '\u{61}', to: '\u{7a}', mapping: Valid }, + Range { from: '\u{7b}', to: '\u{7f}', mapping: DisallowedStd3Valid }, + Range { from: '\u{80}', to: '\u{9f}', mapping: Disallowed }, + Range { from: '\u{a0}', to: '\u{a0}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{a1}', to: '\u{a7}', mapping: Valid }, + Range { from: '\u{a8}', to: '\u{a8}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 0, byte_len: 3 }) }, + Range { from: '\u{a9}', to: '\u{a9}', mapping: Valid }, + Range { from: '\u{aa}', to: '\u{aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{ab}', to: '\u{ac}', mapping: Valid }, + Range { from: '\u{ad}', to: '\u{ad}', mapping: Ignored }, + Range { from: '\u{ae}', to: '\u{ae}', mapping: Valid }, + Range { from: '\u{af}', to: '\u{af}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 0, byte_len: 3 }) }, + Range { from: '\u{b0}', to: '\u{b1}', mapping: Valid }, + Range { from: '\u{b2}', to: '\u{b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{b3}', to: '\u{b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{b4}', to: '\u{b4}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }) }, + Range { from: '\u{b5}', to: '\u{b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{b6}', to: '\u{b7}', mapping: Valid }, + Range { from: '\u{b8}', to: '\u{b8}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 0, byte_len: 3 }) }, + Range { from: '\u{b9}', to: '\u{b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{ba}', to: '\u{ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{bb}', to: '\u{bb}', mapping: Valid }, + Range { from: '\u{bc}', to: '\u{bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 0, byte_len: 5 }) }, + Range { from: '\u{bd}', to: '\u{bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 0, byte_len: 5 }) }, + Range { from: '\u{be}', to: '\u{be}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 0, byte_len: 5 }) }, + Range { from: '\u{bf}', to: '\u{bf}', mapping: Valid }, + Range { from: '\u{c0}', to: '\u{c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{c1}', to: '\u{c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{c2}', to: '\u{c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{c3}', to: '\u{c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{c4}', to: '\u{c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 0, byte_len: 2 }) 
}, + Range { from: '\u{c5}', to: '\u{c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{c6}', to: '\u{c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{c7}', to: '\u{c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{c8}', to: '\u{c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{c9}', to: '\u{c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{ca}', to: '\u{ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{cb}', to: '\u{cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{cc}', to: '\u{cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{cd}', to: '\u{cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{ce}', to: '\u{ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{cf}', to: '\u{cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d0}', to: '\u{d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d1}', to: '\u{d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d2}', to: '\u{d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d3}', to: '\u{d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d4}', to: '\u{d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d5}', to: '\u{d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d6}', to: '\u{d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d7}', to: '\u{d7}', mapping: Valid }, + Range { from: '\u{d8}', to: '\u{d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{d9}', to: '\u{d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{da}', to: '\u{da}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{db}', to: '\u{db}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{dc}', to: '\u{dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{dd}', to: '\u{dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{de}', to: '\u{de}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{df}', to: '\u{df}', mapping: Deviation(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{e0}', to: '\u{ff}', mapping: Valid }, + Range { from: '\u{100}', to: '\u{100}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{101}', to: '\u{101}', mapping: Valid }, + Range { from: '\u{102}', to: '\u{102}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{103}', to: '\u{103}', mapping: Valid }, + Range { from: '\u{104}', to: '\u{104}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{105}', to: '\u{105}', mapping: Valid }, + Range { from: '\u{106}', to: '\u{106}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{107}', to: '\u{107}', mapping: Valid }, + Range { from: '\u{108}', to: '\u{108}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{109}', to: '\u{109}', mapping: Valid }, + Range { from: '\u{10a}', to: '\u{10a}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{10b}', to: '\u{10b}', mapping: Valid }, + Range { from: '\u{10c}', to: '\u{10c}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{10d}', to: '\u{10d}', mapping: Valid }, + Range { from: '\u{10e}', to: '\u{10e}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{10f}', to: '\u{10f}', mapping: Valid }, + Range { from: '\u{110}', to: '\u{110}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{111}', to: '\u{111}', mapping: Valid }, + Range { from: '\u{112}', to: '\u{112}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{113}', to: '\u{113}', mapping: Valid }, + Range { from: '\u{114}', to: '\u{114}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{115}', to: '\u{115}', mapping: Valid }, + Range { from: '\u{116}', to: '\u{116}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{117}', to: '\u{117}', mapping: Valid }, + Range { from: '\u{118}', to: '\u{118}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{119}', to: '\u{119}', mapping: Valid }, + Range { from: '\u{11a}', to: '\u{11a}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{11b}', to: '\u{11b}', mapping: Valid }, + Range { from: '\u{11c}', to: '\u{11c}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{11d}', to: '\u{11d}', mapping: Valid }, + Range { from: '\u{11e}', to: '\u{11e}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{11f}', to: '\u{11f}', mapping: Valid }, + Range { from: '\u{120}', to: '\u{120}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{121}', to: '\u{121}', mapping: Valid }, + Range { from: '\u{122}', to: '\u{122}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{123}', to: '\u{123}', mapping: Valid }, + Range { from: '\u{124}', to: '\u{124}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 0, 
byte_len: 2 }) }, + Range { from: '\u{125}', to: '\u{125}', mapping: Valid }, + Range { from: '\u{126}', to: '\u{126}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{127}', to: '\u{127}', mapping: Valid }, + Range { from: '\u{128}', to: '\u{128}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{129}', to: '\u{129}', mapping: Valid }, + Range { from: '\u{12a}', to: '\u{12a}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{12b}', to: '\u{12b}', mapping: Valid }, + Range { from: '\u{12c}', to: '\u{12c}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{12d}', to: '\u{12d}', mapping: Valid }, + Range { from: '\u{12e}', to: '\u{12e}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{12f}', to: '\u{12f}', mapping: Valid }, + Range { from: '\u{130}', to: '\u{130}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 0, byte_len: 3 }) }, + Range { from: '\u{131}', to: '\u{131}', mapping: Valid }, + Range { from: '\u{132}', to: '\u{133}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{134}', to: '\u{134}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{135}', to: '\u{135}', mapping: Valid }, + Range { from: '\u{136}', to: '\u{136}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{137}', to: '\u{138}', mapping: Valid }, + Range { from: '\u{139}', to: '\u{139}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{13a}', to: '\u{13a}', mapping: Valid }, + Range { from: '\u{13b}', to: '\u{13b}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{13c}', to: '\u{13c}', mapping: Valid }, + Range { from: '\u{13d}', to: '\u{13d}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{13e}', to: '\u{13e}', mapping: Valid }, + Range { from: '\u{13f}', to: '\u{140}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 0, byte_len: 3 }) }, + Range { from: '\u{141}', to: '\u{141}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{142}', to: '\u{142}', mapping: Valid }, + Range { from: '\u{143}', to: '\u{143}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{144}', to: '\u{144}', mapping: Valid }, + Range { from: '\u{145}', to: '\u{145}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{146}', to: '\u{146}', mapping: Valid }, + Range { from: '\u{147}', to: '\u{147}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{148}', to: '\u{148}', mapping: Valid }, + Range { from: '\u{149}', to: '\u{149}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 0, byte_len: 3 }) }, + Range { from: '\u{14a}', to: '\u{14a}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{14b}', to: '\u{14b}', 
mapping: Valid }, + Range { from: '\u{14c}', to: '\u{14c}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{14d}', to: '\u{14d}', mapping: Valid }, + Range { from: '\u{14e}', to: '\u{14e}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{14f}', to: '\u{14f}', mapping: Valid }, + Range { from: '\u{150}', to: '\u{150}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{151}', to: '\u{151}', mapping: Valid }, + Range { from: '\u{152}', to: '\u{152}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{153}', to: '\u{153}', mapping: Valid }, + Range { from: '\u{154}', to: '\u{154}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{155}', to: '\u{155}', mapping: Valid }, + Range { from: '\u{156}', to: '\u{156}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{157}', to: '\u{157}', mapping: Valid }, + Range { from: '\u{158}', to: '\u{158}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{159}', to: '\u{159}', mapping: Valid }, + Range { from: '\u{15a}', to: '\u{15a}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{15b}', to: '\u{15b}', mapping: Valid }, + Range { from: '\u{15c}', to: '\u{15c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{15d}', to: '\u{15d}', mapping: Valid }, + Range { from: '\u{15e}', to: '\u{15e}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{15f}', to: '\u{15f}', mapping: Valid }, + Range { from: '\u{160}', to: '\u{160}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{161}', to: '\u{161}', mapping: Valid }, + Range { from: '\u{162}', to: '\u{162}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{163}', to: '\u{163}', mapping: Valid }, + Range { from: '\u{164}', to: '\u{164}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{165}', to: '\u{165}', mapping: Valid }, + Range { from: '\u{166}', to: '\u{166}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{167}', to: '\u{167}', mapping: Valid }, + Range { from: '\u{168}', to: '\u{168}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{169}', to: '\u{169}', mapping: Valid }, + Range { from: '\u{16a}', to: '\u{16a}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{16b}', to: '\u{16b}', mapping: Valid }, + Range { from: '\u{16c}', to: '\u{16c}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{16d}', to: '\u{16d}', mapping: Valid }, + Range { from: '\u{16e}', to: '\u{16e}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{16f}', to: '\u{16f}', mapping: Valid }, + Range { from: '\u{170}', to: '\u{170}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{171}', to: '\u{171}', mapping: Valid }, + Range { from: '\u{172}', to: '\u{172}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{173}', to: '\u{173}', mapping: Valid }, + Range { from: '\u{174}', to: '\u{174}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{175}', to: '\u{175}', mapping: Valid }, + Range { from: '\u{176}', to: '\u{176}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{177}', to: '\u{177}', mapping: Valid }, + Range { from: '\u{178}', to: '\u{178}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{179}', to: '\u{179}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{17a}', to: '\u{17a}', mapping: Valid }, + Range { from: '\u{17b}', to: '\u{17b}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{17c}', to: '\u{17c}', mapping: Valid }, + Range { from: '\u{17d}', to: '\u{17d}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{17e}', to: '\u{17e}', mapping: Valid }, + Range { from: '\u{17f}', to: '\u{17f}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{180}', to: '\u{180}', mapping: Valid }, + Range { from: '\u{181}', to: '\u{181}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{182}', to: '\u{182}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{183}', to: '\u{183}', mapping: Valid }, + Range { from: '\u{184}', to: '\u{184}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{185}', to: '\u{185}', mapping: Valid }, + Range { from: '\u{186}', to: '\u{186}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{187}', to: '\u{187}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{188}', to: '\u{188}', mapping: Valid }, + Range { from: '\u{189}', to: '\u{189}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{18a}', to: '\u{18a}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{18b}', to: '\u{18b}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{18c}', to: '\u{18d}', mapping: Valid }, + Range { from: '\u{18e}', to: '\u{18e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{18f}', to: '\u{18f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{190}', to: '\u{190}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{191}', to: '\u{191}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{192}', to: '\u{192}', mapping: Valid }, + Range { from: '\u{193}', to: '\u{193}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{194}', to: '\u{194}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{195}', to: '\u{195}', mapping: Valid }, + Range { from: '\u{196}', to: '\u{196}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{197}', to: '\u{197}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{198}', to: '\u{198}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{199}', to: '\u{19b}', mapping: Valid }, + Range { from: '\u{19c}', to: '\u{19c}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{19d}', to: '\u{19d}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{19e}', to: '\u{19e}', mapping: Valid }, + Range { from: '\u{19f}', to: '\u{19f}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1a0}', to: '\u{1a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1a1}', to: '\u{1a1}', mapping: Valid }, + Range { from: '\u{1a2}', to: '\u{1a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1a3}', to: '\u{1a3}', mapping: Valid }, + Range { from: '\u{1a4}', to: '\u{1a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1a5}', to: '\u{1a5}', mapping: Valid }, + Range { from: '\u{1a6}', to: '\u{1a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1a7}', to: '\u{1a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1a8}', to: '\u{1a8}', mapping: Valid }, + Range { from: '\u{1a9}', to: '\u{1a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1aa}', to: '\u{1ab}', mapping: Valid }, + Range { from: '\u{1ac}', to: '\u{1ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1ad}', to: '\u{1ad}', mapping: Valid }, + Range { from: '\u{1ae}', to: '\u{1ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1af}', to: '\u{1af}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1b0}', to: '\u{1b0}', mapping: Valid }, + Range { from: '\u{1b1}', to: '\u{1b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1b2}', to: '\u{1b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1b3}', to: '\u{1b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1b4}', to: '\u{1b4}', mapping: Valid }, + Range { from: '\u{1b5}', to: '\u{1b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1b6}', to: '\u{1b6}', mapping: Valid }, + Range { from: '\u{1b7}', to: '\u{1b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, 
byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1b8}', to: '\u{1b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1b9}', to: '\u{1bb}', mapping: Valid }, + Range { from: '\u{1bc}', to: '\u{1bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1bd}', to: '\u{1c3}', mapping: Valid }, + Range { from: '\u{1c4}', to: '\u{1c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 1, byte_len: 3 }) }, + Range { from: '\u{1c7}', to: '\u{1c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1ca}', to: '\u{1cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1cd}', to: '\u{1cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1ce}', to: '\u{1ce}', mapping: Valid }, + Range { from: '\u{1cf}', to: '\u{1cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d0}', to: '\u{1d0}', mapping: Valid }, + Range { from: '\u{1d1}', to: '\u{1d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d2}', to: '\u{1d2}', mapping: Valid }, + Range { from: '\u{1d3}', to: '\u{1d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d4}', to: '\u{1d4}', mapping: Valid }, + Range { from: '\u{1d5}', to: '\u{1d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d6}', to: '\u{1d6}', mapping: Valid }, + Range { from: '\u{1d7}', to: '\u{1d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d8}', to: '\u{1d8}', mapping: Valid }, + Range { from: '\u{1d9}', to: '\u{1d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1da}', to: '\u{1da}', mapping: Valid }, + Range { from: '\u{1db}', to: '\u{1db}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1dc}', to: '\u{1dd}', mapping: Valid }, + Range { from: '\u{1de}', to: '\u{1de}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1df}', to: '\u{1df}', mapping: Valid }, + Range { from: '\u{1e0}', to: '\u{1e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1e1}', to: '\u{1e1}', mapping: Valid }, + Range { from: '\u{1e2}', to: '\u{1e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1e3}', to: '\u{1e3}', mapping: Valid }, + Range { from: '\u{1e4}', to: '\u{1e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1e5}', to: '\u{1e5}', mapping: Valid }, + Range { from: '\u{1e6}', to: '\u{1e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1e7}', to: '\u{1e7}', mapping: Valid }, + Range { from: '\u{1e8}', to: '\u{1e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1e9}', to: '\u{1e9}', mapping: Valid }, + Range { from: '\u{1ea}', to: '\u{1ea}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1eb}', to: '\u{1eb}', mapping: Valid }, + Range { from: '\u{1ec}', to: '\u{1ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1ed}', to: '\u{1ed}', mapping: Valid }, + Range { from: '\u{1ee}', to: '\u{1ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1ef}', to: '\u{1f0}', mapping: Valid }, + Range { from: '\u{1f1}', to: '\u{1f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1f4}', to: '\u{1f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1f5}', to: '\u{1f5}', mapping: Valid }, + Range { from: '\u{1f6}', to: '\u{1f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1f7}', to: '\u{1f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1f8}', to: '\u{1f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1f9}', to: '\u{1f9}', mapping: Valid }, + Range { from: '\u{1fa}', to: '\u{1fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1fb}', to: '\u{1fb}', mapping: Valid }, + Range { from: '\u{1fc}', to: '\u{1fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1fd}', to: '\u{1fd}', mapping: Valid }, + Range { from: '\u{1fe}', to: '\u{1fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1ff}', to: '\u{1ff}', mapping: Valid }, + Range { from: '\u{200}', to: '\u{200}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{201}', to: '\u{201}', mapping: Valid }, + Range { from: '\u{202}', to: '\u{202}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{203}', to: '\u{203}', mapping: Valid }, + Range { from: '\u{204}', to: '\u{204}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{205}', to: '\u{205}', mapping: Valid }, + Range { from: '\u{206}', to: '\u{206}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{207}', to: '\u{207}', mapping: Valid }, + Range { from: '\u{208}', to: '\u{208}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{209}', to: '\u{209}', mapping: Valid }, + Range { from: '\u{20a}', to: '\u{20a}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{20b}', to: '\u{20b}', mapping: Valid }, + Range { from: '\u{20c}', to: '\u{20c}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{20d}', to: '\u{20d}', mapping: Valid }, + Range { from: '\u{20e}', to: '\u{20e}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{20f}', to: '\u{20f}', mapping: Valid }, + Range { from: '\u{210}', to: '\u{210}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, 
byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{211}', to: '\u{211}', mapping: Valid }, + Range { from: '\u{212}', to: '\u{212}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{213}', to: '\u{213}', mapping: Valid }, + Range { from: '\u{214}', to: '\u{214}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{215}', to: '\u{215}', mapping: Valid }, + Range { from: '\u{216}', to: '\u{216}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{217}', to: '\u{217}', mapping: Valid }, + Range { from: '\u{218}', to: '\u{218}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{219}', to: '\u{219}', mapping: Valid }, + Range { from: '\u{21a}', to: '\u{21a}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{21b}', to: '\u{21b}', mapping: Valid }, + Range { from: '\u{21c}', to: '\u{21c}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{21d}', to: '\u{21d}', mapping: Valid }, + Range { from: '\u{21e}', to: '\u{21e}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{21f}', to: '\u{21f}', mapping: Valid }, + Range { from: '\u{220}', to: '\u{220}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{221}', to: '\u{221}', mapping: Valid }, + Range { from: '\u{222}', to: '\u{222}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{223}', to: '\u{223}', mapping: Valid }, + Range { from: '\u{224}', to: '\u{224}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{225}', to: '\u{225}', mapping: Valid }, + Range { from: '\u{226}', to: '\u{226}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{227}', to: '\u{227}', mapping: Valid }, + Range { from: '\u{228}', to: '\u{228}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{229}', to: '\u{229}', mapping: Valid }, + Range { from: '\u{22a}', to: '\u{22a}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{22b}', to: '\u{22b}', mapping: Valid }, + Range { from: '\u{22c}', to: '\u{22c}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{22d}', to: '\u{22d}', mapping: Valid }, + Range { from: '\u{22e}', to: '\u{22e}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{22f}', to: '\u{22f}', mapping: Valid }, + Range { from: '\u{230}', to: '\u{230}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{231}', to: '\u{231}', mapping: Valid }, + Range { from: '\u{232}', to: '\u{232}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{233}', to: '\u{239}', mapping: Valid }, + Range { from: '\u{23a}', to: '\u{23a}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 1, byte_len: 3 }) }, + Range { from: '\u{23b}', to: '\u{23b}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{23c}', to: '\u{23c}', mapping: Valid },
+ Range { from: '\u{23d}', to: '\u{23d}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{23e}', to: '\u{23e}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 1, byte_len: 3 }) },
+ Range { from: '\u{23f}', to: '\u{240}', mapping: Valid },
+ Range { from: '\u{241}', to: '\u{241}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{242}', to: '\u{242}', mapping: Valid },
+ Range { from: '\u{243}', to: '\u{243}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{244}', to: '\u{244}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{245}', to: '\u{245}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{246}', to: '\u{246}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{247}', to: '\u{247}', mapping: Valid },
+ Range { from: '\u{248}', to: '\u{248}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{249}', to: '\u{249}', mapping: Valid },
+ Range { from: '\u{24a}', to: '\u{24a}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{24b}', to: '\u{24b}', mapping: Valid },
+ Range { from: '\u{24c}', to: '\u{24c}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{24d}', to: '\u{24d}', mapping: Valid },
+ Range { from: '\u{24e}', to: '\u{24e}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{24f}', to: '\u{2af}', mapping: Valid },
+ Range { from: '\u{2b0}', to: '\u{2b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2b1}', to: '\u{2b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{2b2}', to: '\u{2b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2b3}', to: '\u{2b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2b4}', to: '\u{2b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{2b5}', to: '\u{2b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{2b6}', to: '\u{2b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{2b7}', to: '\u{2b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2b8}', to: '\u{2b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2b9}', to: '\u{2d7}', mapping: Valid },
+ Range { from: '\u{2d8}', to: '\u{2d8}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 1, byte_len: 3 }) },
+ Range { from: '\u{2d9}', to: '\u{2d9}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 1, byte_len: 3 }) },
+ Range { from: '\u{2da}', to: '\u{2da}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 1, byte_len: 3 }) },
+ Range { from: '\u{2db}', to: '\u{2db}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 1, byte_len: 3 }) },
+ Range { from: '\u{2dc}', to: '\u{2dc}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 1, byte_len: 3 }) },
+ Range { from: '\u{2dd}', to: '\u{2dd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 1, byte_len: 3 }) },
+ Range { from: '\u{2de}', to: '\u{2df}', mapping: Valid },
+ Range { from: '\u{2e0}', to: '\u{2e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{2e1}', to: '\u{2e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2e2}', to: '\u{2e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2e3}', to: '\u{2e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2e4}', to: '\u{2e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{2e5}', to: '\u{33f}', mapping: Valid },
+ Range { from: '\u{340}', to: '\u{340}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{341}', to: '\u{341}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{342}', to: '\u{342}', mapping: Valid },
+ Range { from: '\u{343}', to: '\u{343}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{344}', to: '\u{344}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 1, byte_len: 4 }) },
+ Range { from: '\u{345}', to: '\u{345}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{346}', to: '\u{34e}', mapping: Valid },
+ Range { from: '\u{34f}', to: '\u{34f}', mapping: Ignored },
+ Range { from: '\u{350}', to: '\u{36f}', mapping: Valid },
+ Range { from: '\u{370}', to: '\u{370}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{371}', to: '\u{371}', mapping: Valid },
+ Range { from: '\u{372}', to: '\u{372}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{373}', to: '\u{373}', mapping: Valid },
+ Range { from: '\u{374}', to: '\u{374}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{375}', to: '\u{375}', mapping: Valid },
+ Range { from: '\u{376}', to: '\u{376}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{377}', to: '\u{377}', mapping: Valid },
+ Range { from: '\u{378}', to: '\u{379}', mapping: Disallowed },
+ Range { from: '\u{37a}', to: '\u{37a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 1, byte_len: 3 }) },
+ Range { from: '\u{37b}', to: '\u{37d}', mapping: Valid },
+ Range { from: '\u{37e}', to: '\u{37e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }) },
+ Range { from: '\u{37f}', to: '\u{37f}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{380}', to: '\u{383}', mapping: Disallowed },
+ Range { from: '\u{384}', to: '\u{384}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }) },
+ Range { from: '\u{385}', to: '\u{385}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 2, byte_len: 5 }) },
+ Range { from: '\u{386}', to: '\u{386}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{387}', to: '\u{387}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{388}', to: '\u{388}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{389}', to: '\u{389}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{38a}', to: '\u{38a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{38b}', to: '\u{38b}', mapping: Disallowed },
+ Range { from: '\u{38c}', to: '\u{38c}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{38d}', to: '\u{38d}', mapping: Disallowed },
+ Range { from: '\u{38e}', to: '\u{38e}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{38f}', to: '\u{38f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{390}', to: '\u{390}', mapping: Valid },
+ Range { from: '\u{391}', to: '\u{391}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{392}', to: '\u{392}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{393}', to: '\u{393}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{394}', to: '\u{394}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{395}', to: '\u{395}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{396}', to: '\u{396}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{397}', to: '\u{397}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{398}', to: '\u{398}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{399}', to: '\u{399}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{39a}', to: '\u{39a}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{39b}', to: '\u{39b}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{39c}', to: '\u{39c}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) },
+ Range { from: '\u{39d}', to: '\u{39d}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{39e}', to: '\u{39e}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{39f}', to: '\u{39f}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a0}', to: '\u{3a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a1}', to: '\u{3a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a2}', to: '\u{3a2}', mapping: Disallowed },
+ Range { from: '\u{3a3}', to: '\u{3a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a4}', to: '\u{3a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a5}', to: '\u{3a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a6}', to: '\u{3a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a7}', to: '\u{3a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a8}', to: '\u{3a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3a9}', to: '\u{3a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3aa}', to: '\u{3aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3ab}', to: '\u{3ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3ac}', to: '\u{3c1}', mapping: Valid },
+ Range { from: '\u{3c2}', to: '\u{3c2}', mapping: Deviation(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3c3}', to: '\u{3ce}', mapping: Valid },
+ Range { from: '\u{3cf}', to: '\u{3cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d0}', to: '\u{3d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d1}', to: '\u{3d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d2}', to: '\u{3d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d3}', to: '\u{3d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d4}', to: '\u{3d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d5}', to: '\u{3d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d6}', to: '\u{3d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d7}', to: '\u{3d7}', mapping: Valid },
+ Range { from: '\u{3d8}', to: '\u{3d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3d9}', to: '\u{3d9}', mapping: Valid },
+ Range { from: '\u{3da}', to: '\u{3da}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3db}', to: '\u{3db}', mapping: Valid },
+ Range { from: '\u{3dc}', to: '\u{3dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3dd}', to: '\u{3dd}', mapping: Valid },
+ Range { from: '\u{3de}', to: '\u{3de}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3df}', to: '\u{3df}', mapping: Valid },
+ Range { from: '\u{3e0}', to: '\u{3e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3e1}', to: '\u{3e1}', mapping: Valid },
+ Range { from: '\u{3e2}', to: '\u{3e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3e3}', to: '\u{3e3}', mapping: Valid },
+ Range { from: '\u{3e4}', to: '\u{3e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3e5}', to: '\u{3e5}', mapping: Valid },
+ Range { from: '\u{3e6}', to: '\u{3e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3e7}', to: '\u{3e7}', mapping: Valid },
+ Range { from: '\u{3e8}', to: '\u{3e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3e9}', to: '\u{3e9}', mapping: Valid },
+ Range { from: '\u{3ea}', to: '\u{3ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3eb}', to: '\u{3eb}', mapping: Valid },
+ Range { from: '\u{3ec}', to: '\u{3ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3ed}', to: '\u{3ed}', mapping: Valid },
+ Range { from: '\u{3ee}', to: '\u{3ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3ef}', to: '\u{3ef}', mapping: Valid },
+ Range { from: '\u{3f0}', to: '\u{3f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3f1}', to: '\u{3f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3f2}', to: '\u{3f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3f3}', to: '\u{3f3}', mapping: Valid },
+ Range { from: '\u{3f4}', to: '\u{3f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3f5}', to: '\u{3f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3f6}', to: '\u{3f6}', mapping: Valid },
+ Range { from: '\u{3f7}', to: '\u{3f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3f8}', to: '\u{3f8}', mapping: Valid },
+ Range { from: '\u{3f9}', to: '\u{3f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3fa}', to: '\u{3fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3fb}', to: '\u{3fc}', mapping: Valid },
+ Range { from: '\u{3fd}', to: '\u{3fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3fe}', to: '\u{3fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{3ff}', to: '\u{3ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{400}', to: '\u{400}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{401}', to: '\u{401}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{402}', to: '\u{402}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{403}', to: '\u{403}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{404}', to: '\u{404}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{405}', to: '\u{405}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{406}', to: '\u{406}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{407}', to: '\u{407}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{408}', to: '\u{408}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{409}', to: '\u{409}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{40a}', to: '\u{40a}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{40b}', to: '\u{40b}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{40c}', to: '\u{40c}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{40d}', to: '\u{40d}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{40e}', to: '\u{40e}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{40f}', to: '\u{40f}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{410}', to: '\u{410}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{411}', to: '\u{411}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{412}', to: '\u{412}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{413}', to: '\u{413}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{414}', to: '\u{414}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{415}', to: '\u{415}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{416}', to: '\u{416}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{417}', to: '\u{417}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{418}', to: '\u{418}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{419}', to: '\u{419}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{41a}', to: '\u{41a}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{41b}', to: '\u{41b}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{41c}', to: '\u{41c}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{41d}', to: '\u{41d}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{41e}', to: '\u{41e}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{41f}', to: '\u{41f}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{420}', to: '\u{420}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{421}', to: '\u{421}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{422}', to: '\u{422}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{423}', to: '\u{423}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{424}', to: '\u{424}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{425}', to: '\u{425}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{426}', to: '\u{426}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{427}', to: '\u{427}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{428}', to: '\u{428}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{429}', to: '\u{429}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{42a}', to: '\u{42a}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{42b}', to: '\u{42b}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{42c}', to: '\u{42c}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{42d}', to: '\u{42d}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{42e}', to: '\u{42e}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{42f}', to: '\u{42f}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{430}', to: '\u{45f}', mapping: Valid },
+ Range { from: '\u{460}', to: '\u{460}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{461}', to: '\u{461}', mapping: Valid },
+ Range { from: '\u{462}', to: '\u{462}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{463}', to: '\u{463}', mapping: Valid },
+ Range { from: '\u{464}', to: '\u{464}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{465}', to: '\u{465}', mapping: Valid },
+ Range { from: '\u{466}', to: '\u{466}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{467}', to: '\u{467}', mapping: Valid },
+ Range { from: '\u{468}', to: '\u{468}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{469}', to: '\u{469}', mapping: Valid },
+ Range { from: '\u{46a}', to: '\u{46a}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{46b}', to: '\u{46b}', mapping: Valid },
+ Range { from: '\u{46c}', to: '\u{46c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{46d}', to: '\u{46d}', mapping: Valid },
+ Range { from: '\u{46e}', to: '\u{46e}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{46f}', to: '\u{46f}', mapping: Valid },
+ Range { from: '\u{470}', to: '\u{470}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{471}', to: '\u{471}', mapping: Valid },
+ Range { from: '\u{472}', to: '\u{472}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{473}', to: '\u{473}', mapping: Valid },
+ Range { from: '\u{474}', to: '\u{474}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{475}', to: '\u{475}', mapping: Valid },
+ Range { from: '\u{476}', to: '\u{476}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{477}', to: '\u{477}', mapping: Valid },
+ Range { from: '\u{478}', to: '\u{478}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{479}', to: '\u{479}', mapping: Valid },
+ Range { from: '\u{47a}', to: '\u{47a}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{47b}', to: '\u{47b}', mapping: Valid },
+ Range { from: '\u{47c}', to: '\u{47c}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{47d}', to: '\u{47d}', mapping: Valid },
+ Range { from: '\u{47e}', to: '\u{47e}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{47f}', to: '\u{47f}', mapping: Valid },
+ Range { from: '\u{480}', to: '\u{480}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{481}', to: '\u{489}', mapping: Valid },
+ Range { from: '\u{48a}', to: '\u{48a}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{48b}', to: '\u{48b}', mapping: Valid },
+ Range { from: '\u{48c}', to: '\u{48c}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{48d}', to: '\u{48d}', mapping: Valid },
+ Range { from: '\u{48e}', to: '\u{48e}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{48f}', to: '\u{48f}', mapping: Valid },
+ Range { from: '\u{490}', to: '\u{490}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{491}', to: '\u{491}', mapping: Valid },
+ Range { from: '\u{492}', to: '\u{492}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{493}', to: '\u{493}', mapping: Valid },
+ Range { from: '\u{494}', to: '\u{494}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{495}', to: '\u{495}', mapping: Valid },
+ Range { from: '\u{496}', to: '\u{496}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{497}', to: '\u{497}', mapping: Valid },
+ Range { from: '\u{498}', to: '\u{498}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{499}', to: '\u{499}', mapping: Valid },
+ Range { from: '\u{49a}', to: '\u{49a}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{49b}', to: '\u{49b}', mapping: Valid },
+ Range { from: '\u{49c}', to: '\u{49c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{49d}', to: '\u{49d}', mapping: Valid },
+ Range { from: '\u{49e}', to: '\u{49e}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{49f}', to: '\u{49f}', mapping: Valid },
+ Range { from: '\u{4a0}', to: '\u{4a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4a1}', to: '\u{4a1}', mapping: Valid },
+ Range { from: '\u{4a2}', to: '\u{4a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4a3}', to: '\u{4a3}', mapping: Valid },
+ Range { from: '\u{4a4}', to: '\u{4a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4a5}', to: '\u{4a5}', mapping: Valid },
+ Range { from: '\u{4a6}', to: '\u{4a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4a7}', to: '\u{4a7}', mapping: Valid },
+ Range { from: '\u{4a8}', to: '\u{4a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4a9}', to: '\u{4a9}', mapping: Valid },
+ Range { from: '\u{4aa}', to: '\u{4aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4ab}', to: '\u{4ab}', mapping: Valid },
+ Range { from: '\u{4ac}', to: '\u{4ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4ad}', to: '\u{4ad}', mapping: Valid },
+ Range { from: '\u{4ae}', to: '\u{4ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4af}', to: '\u{4af}', mapping: Valid },
+ Range { from: '\u{4b0}', to: '\u{4b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4b1}', to: '\u{4b1}', mapping: Valid },
+ Range { from: '\u{4b2}', to: '\u{4b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4b3}', to: '\u{4b3}', mapping: Valid },
+ Range { from: '\u{4b4}', to: '\u{4b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4b5}', to: '\u{4b5}', mapping: Valid },
+ Range { from: '\u{4b6}', to: '\u{4b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4b7}', to: '\u{4b7}', mapping: Valid },
+ Range { from: '\u{4b8}', to: '\u{4b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4b9}', to: '\u{4b9}', mapping: Valid },
+ Range { from: '\u{4ba}', to: '\u{4ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4bb}', to: '\u{4bb}', mapping: Valid },
+ Range { from: '\u{4bc}', to: '\u{4bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4bd}', to: '\u{4bd}', mapping: Valid },
+ Range { from: '\u{4be}', to: '\u{4be}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4bf}', to: '\u{4bf}', mapping: Valid },
+ Range { from: '\u{4c0}', to: '\u{4c0}', mapping: Disallowed },
+ Range { from: '\u{4c1}', to: '\u{4c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4c2}', to: '\u{4c2}', mapping: Valid },
+ Range { from: '\u{4c3}', to: '\u{4c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4c4}', to: '\u{4c4}', mapping: Valid },
+ Range { from: '\u{4c5}', to: '\u{4c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4c6}', to: '\u{4c6}', mapping: Valid },
+ Range { from: '\u{4c7}', to: '\u{4c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4c8}', to: '\u{4c8}', mapping: Valid },
+ Range { from: '\u{4c9}', to: '\u{4c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4ca}', to: '\u{4ca}', mapping: Valid },
+ Range { from: '\u{4cb}', to: '\u{4cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4cc}', to: '\u{4cc}', mapping: Valid },
+ Range { from: '\u{4cd}', to: '\u{4cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4ce}', to: '\u{4cf}', mapping: Valid },
+ Range { from: '\u{4d0}', to: '\u{4d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4d1}', to: '\u{4d1}', mapping: Valid },
+ Range { from: '\u{4d2}', to: '\u{4d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4d3}', to: '\u{4d3}', mapping: Valid },
+ Range { from: '\u{4d4}', to: '\u{4d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4d5}', to: '\u{4d5}', mapping: Valid },
+ Range { from: '\u{4d6}', to: '\u{4d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4d7}', to: '\u{4d7}', mapping: Valid },
+ Range { from: '\u{4d8}', to: '\u{4d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4d9}', to: '\u{4d9}', mapping: Valid },
+ Range { from: '\u{4da}', to: '\u{4da}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4db}', to: '\u{4db}', mapping: Valid },
+ Range { from: '\u{4dc}', to: '\u{4dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4dd}', to: '\u{4dd}', mapping: Valid },
+ Range { from: '\u{4de}', to: '\u{4de}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4df}', to: '\u{4df}', mapping: Valid },
+ Range { from: '\u{4e0}', to: '\u{4e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4e1}', to: '\u{4e1}', mapping: Valid },
+ Range { from: '\u{4e2}', to: '\u{4e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4e3}', to: '\u{4e3}', mapping: Valid },
+ Range { from: '\u{4e4}', to: '\u{4e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4e5}', to: '\u{4e5}', mapping: Valid },
+ Range { from: '\u{4e6}', to: '\u{4e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4e7}', to: '\u{4e7}', mapping: Valid },
+ Range { from: '\u{4e8}', to: '\u{4e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4e9}', to: '\u{4e9}', mapping: Valid },
+ Range { from: '\u{4ea}', to: '\u{4ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4eb}', to: '\u{4eb}', mapping: Valid },
+ Range { from: '\u{4ec}', to: '\u{4ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4ed}', to: '\u{4ed}', mapping: Valid },
+ Range { from: '\u{4ee}', to: '\u{4ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4ef}', to: '\u{4ef}', mapping: Valid },
+ Range { from: '\u{4f0}', to: '\u{4f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4f1}', to: '\u{4f1}', mapping: Valid },
+ Range { from: '\u{4f2}', to: '\u{4f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4f3}', to: '\u{4f3}', mapping: Valid },
+ Range { from: '\u{4f4}', to: '\u{4f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4f5}', to: '\u{4f5}', mapping: Valid },
+ Range { from: '\u{4f6}', to: '\u{4f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4f7}', to: '\u{4f7}', mapping: Valid },
+ Range { from: '\u{4f8}', to: '\u{4f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4f9}', to: '\u{4f9}', mapping: Valid },
+ Range { from: '\u{4fa}', to: '\u{4fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4fb}', to: '\u{4fb}', mapping: Valid },
+ Range { from: '\u{4fc}', to: '\u{4fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4fd}', to: '\u{4fd}', mapping: Valid },
+ Range { from: '\u{4fe}', to: '\u{4fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{4ff}', to: '\u{4ff}', mapping: Valid },
+ Range { from: '\u{500}', to: '\u{500}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{501}', to: '\u{501}', mapping: Valid },
+ Range { from: '\u{502}', to: '\u{502}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{503}', to: '\u{503}', mapping: Valid },
+ Range { from: '\u{504}', to: '\u{504}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{505}', to: '\u{505}', mapping: Valid },
+ Range { from: '\u{506}', to: '\u{506}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{507}', to: '\u{507}', mapping: Valid },
+ Range { from: '\u{508}', to: '\u{508}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{509}', to: '\u{509}', mapping: Valid },
+ Range { from: '\u{50a}', to: '\u{50a}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{50b}', to: '\u{50b}', mapping: Valid },
+ Range { from: '\u{50c}', to: '\u{50c}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{50d}', to: '\u{50d}', mapping: Valid },
+ Range { from: '\u{50e}', to: '\u{50e}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{50f}', to: '\u{50f}', mapping: Valid },
+ Range { from: '\u{510}', to: '\u{510}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{511}', to: '\u{511}', mapping: Valid },
+ Range { from: '\u{512}', to: '\u{512}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{513}', to: '\u{513}', mapping: Valid },
+ Range { from: '\u{514}', to: '\u{514}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{515}', to: '\u{515}', mapping: Valid },
+ Range { from: '\u{516}', to: '\u{516}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{517}', to: '\u{517}', mapping: Valid },
+ Range { from: '\u{518}', to: '\u{518}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{519}', to: '\u{519}', mapping: Valid },
+ Range { from: '\u{51a}', to: '\u{51a}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{51b}', to: '\u{51b}', mapping: Valid },
+ Range { from: '\u{51c}', to: '\u{51c}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{51d}', to: '\u{51d}', mapping: Valid },
+ Range { from: '\u{51e}', to: '\u{51e}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{51f}', to: '\u{51f}', mapping: Valid },
+ Range { from: '\u{520}', to: '\u{520}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{521}', to: '\u{521}', mapping: Valid },
+ Range { from: '\u{522}', to: '\u{522}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{523}', to: '\u{523}', mapping: Valid },
+ Range { from: '\u{524}', to: '\u{524}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{525}', to: '\u{525}', mapping: Valid },
+ Range { from: '\u{526}', to: '\u{526}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{527}', to: '\u{527}', mapping: Valid },
+ Range { from: '\u{528}', to: '\u{528}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{529}', to: '\u{529}', mapping: Valid },
+ Range { from: '\u{52a}', to: '\u{52a}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{52b}', to: '\u{52b}', mapping: Valid },
+ Range { from: '\u{52c}', to: '\u{52c}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{52d}', to: '\u{52d}', mapping: Valid },
+ Range { from: '\u{52e}', to: '\u{52e}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{52f}', to: '\u{52f}', mapping: Valid },
+ Range { from: '\u{530}', to: '\u{530}', mapping: Disallowed },
+ Range { from: '\u{531}', to: '\u{531}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{532}', to: '\u{532}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{533}', to: '\u{533}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{534}', to: '\u{534}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{535}', to: '\u{535}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{536}', to: '\u{536}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{537}', to: '\u{537}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{538}', to: '\u{538}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{539}', to: '\u{539}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{53a}', to: '\u{53a}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{53b}', to: '\u{53b}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{53c}', to: '\u{53c}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{53d}', to: '\u{53d}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{53e}', to: '\u{53e}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{53f}', to: '\u{53f}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{540}', to: '\u{540}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{541}', to: '\u{541}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{542}', to: '\u{542}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{543}', to: '\u{543}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{544}', to: '\u{544}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{545}', to: '\u{545}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{546}', to: '\u{546}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{547}', to: '\u{547}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{548}', to: '\u{548}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{549}', to: '\u{549}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{54a}', to: '\u{54a}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{54b}', to: '\u{54b}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{54c}', to: '\u{54c}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{54d}', to: '\u{54d}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{54e}', to: '\u{54e}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{54f}', to: '\u{54f}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{550}', to: '\u{550}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{551}', to: '\u{551}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{552}', to: '\u{552}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{553}', to: '\u{553}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{554}', to: '\u{554}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{555}', to: '\u{555}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{556}', to: '\u{556}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 3, byte_len: 2 }) },
+ Range { from: '\u{557}', to: '\u{558}', mapping: Disallowed },
+ Range { from: '\u{559}', to: '\u{55f}', mapping: Valid },
+ Range { from: '\u{560}', to: '\u{560}', mapping: Disallowed },
+ Range { from: '\u{561}', to: '\u{586}', mapping: Valid },
+ Range { from: '\u{587}', to: '\u{587}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 3, byte_len: 4 }) },
+ Range { from: '\u{588}', to: '\u{588}', mapping: Disallowed },
+ Range { from: '\u{589}', to: '\u{58a}', mapping: Valid },
+ Range { from: '\u{58b}', to: '\u{58c}', mapping: Disallowed },
+ Range { from: '\u{58d}', to: '\u{58f}', mapping: Valid },
+ Range { from: '\u{590}', to: '\u{590}', mapping: Disallowed },
+ Range { from: '\u{591}', to: '\u{5c7}', mapping: Valid },
+ Range { from: '\u{5c8}', to: '\u{5cf}', mapping: Disallowed },
+ Range { from: '\u{5d0}', to: '\u{5ea}', mapping: Valid },
+ Range { from: '\u{5eb}', to: '\u{5ef}', mapping: Disallowed },
+ Range { from: '\u{5f0}', to: '\u{5f4}', mapping: Valid },
+ Range { from: '\u{5f5}', to: '\u{605}', mapping: Disallowed },
+ Range { from: '\u{606}', to: '\u{61b}', mapping: Valid },
+ Range { from: '\u{61c}', to: '\u{61d}', mapping: Disallowed },
+ Range { from: '\u{61e}', to: '\u{674}', mapping: Valid },
+ Range { from: '\u{675}', to: '\u{675}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 3, byte_len: 4 }) },
+ Range { from: '\u{676}', to: '\u{676}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 3, byte_len: 4 }) },
+ Range { from: '\u{677}', to: '\u{677}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 3, byte_len: 4 }) },
+ Range { from: '\u{678}', to: '\u{678}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 3, byte_len: 4 }) },
+ Range { from: '\u{679}', to: '\u{6dc}', mapping: Valid },
+ Range { from: '\u{6dd}', to: '\u{6dd}', mapping: Disallowed },
+ Range { from: '\u{6de}', to: '\u{70d}', mapping: Valid },
+ Range { from: '\u{70e}', to: '\u{70f}', mapping: Disallowed },
+ Range { from: '\u{710}', to: '\u{74a}', mapping: Valid },
+ Range { from: '\u{74b}', to: '\u{74c}', mapping: Disallowed },
+ Range { from: '\u{74d}', to: '\u{7b1}', mapping: Valid },
+ Range { from: '\u{7b2}', to: '\u{7bf}', mapping: Disallowed },
+ Range { from: '\u{7c0}', to: '\u{7fa}', mapping: Valid },
+ Range { from: '\u{7fb}', to: '\u{7ff}', mapping: Disallowed },
+ Range { from: '\u{800}', to: '\u{82d}', mapping: Valid },
+ Range { from: '\u{82e}', to: '\u{82f}', mapping: Disallowed },
+ Range { from: '\u{830}', to: '\u{83e}', mapping: Valid },
+ Range { from: '\u{83f}', to: '\u{83f}', mapping: Disallowed },
+ Range { from: '\u{840}', to: '\u{85b}', mapping: Valid },
+ Range { from: '\u{85c}', to: '\u{85d}', mapping: Disallowed },
+ Range { from: '\u{85e}', to: '\u{85e}', mapping: Valid },
+ Range { from: '\u{85f}', to: '\u{89f}', mapping: Disallowed },
+ Range { from: '\u{8a0}', to: '\u{8b4}', mapping: Valid },
+ Range { from: '\u{8b5}', to: '\u{8b5}', mapping: Disallowed },
+ Range { from: '\u{8b6}', to: '\u{8bd}', mapping: Valid },
+ Range { from: '\u{8be}', to: '\u{8d3}', mapping: Disallowed },
+ Range { from: '\u{8d4}', to: '\u{8e1}', mapping: Valid },
+ Range { from: '\u{8e2}', to: '\u{8e2}', mapping: Disallowed },
+ Range { from: '\u{8e3}', to: '\u{957}', mapping: Valid },
+ Range { from: '\u{958}', to: '\u{958}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 3, byte_len: 6 }) },
+ Range { from: '\u{959}', to: '\u{959}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 3, byte_len: 6 }) },
+ Range { from: '\u{95a}', to: '\u{95a}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 3, byte_len: 6 }) },
+ Range { from: '\u{95b}', to: '\u{95b}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{95c}', to: '\u{95c}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{95d}', to: '\u{95d}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{95e}', to: '\u{95e}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{95f}', to: '\u{95f}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{960}', to: '\u{983}', mapping: Valid },
+ Range { from: '\u{984}', to: '\u{984}', mapping: Disallowed },
+ Range { from: '\u{985}', to: '\u{98c}', mapping: Valid },
+ Range { from: '\u{98d}', to: '\u{98e}', mapping: Disallowed },
+ Range { from: '\u{98f}', to: '\u{990}', mapping: Valid },
+ Range { from: '\u{991}', to: '\u{992}', mapping: Disallowed },
+ Range { from: '\u{993}', to: '\u{9a8}', mapping: Valid },
+ Range { from: '\u{9a9}', to: '\u{9a9}', mapping: Disallowed },
+ Range { from: '\u{9aa}', to: '\u{9b0}', mapping: Valid },
+ Range { from: '\u{9b1}', to: '\u{9b1}', mapping: Disallowed },
+ Range { from: '\u{9b2}', to: '\u{9b2}', mapping: Valid },
+ Range { from: '\u{9b3}', to: '\u{9b5}', mapping: Disallowed },
+ Range { from: '\u{9b6}', to: '\u{9b9}', mapping: Valid },
+ Range { from: '\u{9ba}', to: '\u{9bb}', mapping: Disallowed },
+ Range { from: '\u{9bc}', to: '\u{9c4}', mapping: Valid },
+ Range { from: '\u{9c5}', to: '\u{9c6}', mapping: Disallowed },
+ Range { from: '\u{9c7}', to: '\u{9c8}', mapping: Valid },
+ Range { from: '\u{9c9}', to: '\u{9ca}', mapping: Disallowed },
+ Range { from: '\u{9cb}', to: '\u{9ce}', mapping: Valid },
+ Range { from: '\u{9cf}', to: '\u{9d6}', mapping: Disallowed },
+ Range { from: '\u{9d7}', to: '\u{9d7}', mapping: Valid },
+ Range { from: '\u{9d8}', to: '\u{9db}', mapping: Disallowed },
+ Range { from: '\u{9dc}', to: '\u{9dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{9dd}', to: '\u{9dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{9de}', to: '\u{9de}', mapping: Disallowed },
+ Range { from: '\u{9df}', to: '\u{9df}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{9e0}', to: '\u{9e3}', mapping: Valid },
+ Range { from: '\u{9e4}', to: '\u{9e5}', mapping: Disallowed },
+ Range { from: '\u{9e6}', to: '\u{9fb}', mapping: Valid },
+ Range { from: '\u{9fc}', to: '\u{a00}', mapping: Disallowed },
+ Range { from: '\u{a01}', to: '\u{a03}', mapping: Valid },
+ Range { from: '\u{a04}', to: '\u{a04}', mapping: Disallowed },
+ Range { from: '\u{a05}', to: '\u{a0a}', mapping: Valid },
+ Range { from: '\u{a0b}', to: '\u{a0e}', mapping: Disallowed },
+ Range { from: '\u{a0f}', to: '\u{a10}', mapping: Valid },
+ Range { from: '\u{a11}', to: '\u{a12}', mapping: Disallowed },
+ Range { from: '\u{a13}', to: '\u{a28}', mapping: Valid },
+ Range { from: '\u{a29}', to: '\u{a29}', mapping: Disallowed },
+ Range { from: '\u{a2a}', to: '\u{a30}', mapping: Valid },
+ Range { from: '\u{a31}', to: '\u{a31}', mapping: Disallowed },
+ Range { from: '\u{a32}', to: '\u{a32}', mapping: Valid },
+ Range { from: '\u{a33}', to: '\u{a33}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{a34}', to: '\u{a34}', mapping: Disallowed },
+ Range { from: '\u{a35}', to: '\u{a35}', mapping: Valid },
+ Range { from: '\u{a36}', to: '\u{a36}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{a37}', to: '\u{a37}', mapping: Disallowed },
+ Range { from: '\u{a38}', to: '\u{a39}', mapping: Valid },
+ Range { from: '\u{a3a}', to: '\u{a3b}', mapping: Disallowed },
+ Range { from: '\u{a3c}', to: '\u{a3c}', mapping: Valid },
+ Range { from: '\u{a3d}', to: '\u{a3d}', mapping: Disallowed },
+ Range { from: '\u{a3e}', to: '\u{a42}', mapping: Valid },
+ Range { from: '\u{a43}', to: '\u{a46}', mapping: Disallowed },
+ Range { from: '\u{a47}', to: '\u{a48}', mapping: Valid },
+ Range { from: '\u{a49}', to: '\u{a4a}', mapping: Disallowed },
+ Range { from: '\u{a4b}', to: '\u{a4d}', mapping: Valid },
+ Range { from: '\u{a4e}', to: '\u{a50}', mapping: Disallowed },
+ Range { from: '\u{a51}', to: '\u{a51}', mapping: Valid },
+ Range { from: '\u{a52}', to: '\u{a58}', mapping: Disallowed },
+ Range { from: '\u{a59}', to: '\u{a59}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{a5a}', to: '\u{a5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{a5b}', to: '\u{a5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{a5c}', to: '\u{a5c}', mapping: Valid },
+ Range { from: '\u{a5d}', to: '\u{a5d}', mapping: Disallowed },
+ Range { from: '\u{a5e}', to: '\u{a5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{a5f}', to: '\u{a65}', mapping: Disallowed },
+ Range { from: '\u{a66}', to: '\u{a75}', mapping: Valid },
+ Range { from: '\u{a76}', to: '\u{a80}', mapping: Disallowed },
+ Range { from: '\u{a81}', to: '\u{a83}', mapping: Valid },
+ Range { from: '\u{a84}', to: '\u{a84}', mapping: Disallowed },
+ Range { from: '\u{a85}', to: '\u{a8d}', mapping: Valid },
+ Range { from: '\u{a8e}', to: '\u{a8e}', mapping: Disallowed },
+ Range { from: '\u{a8f}', to: '\u{a91}', mapping: Valid },
+ Range { from: '\u{a92}', to: '\u{a92}', mapping: Disallowed },
+ Range { from: '\u{a93}', to: '\u{aa8}', mapping: Valid },
+ Range { from: '\u{aa9}', to: '\u{aa9}', mapping: Disallowed },
+ Range { from: '\u{aaa}', to: '\u{ab0}', mapping: Valid },
+ Range { from: '\u{ab1}', to: '\u{ab1}', mapping: Disallowed },
+ Range { from: '\u{ab2}', to: '\u{ab3}', mapping: Valid },
+ Range { from: '\u{ab4}', to: '\u{ab4}', mapping: Disallowed },
+ Range { from: '\u{ab5}', to: '\u{ab9}', mapping: Valid },
+ Range { from: '\u{aba}', to: '\u{abb}', mapping: Disallowed },
+ Range { from: '\u{abc}', to: '\u{ac5}', mapping: Valid },
+ Range { from: '\u{ac6}', to: '\u{ac6}', mapping: Disallowed },
+ Range { from: '\u{ac7}', to: '\u{ac9}', mapping: Valid },
+ Range { from: '\u{aca}', to: '\u{aca}', mapping: Disallowed },
+ Range { from: '\u{acb}', to: '\u{acd}', mapping: Valid },
+ Range { from: '\u{ace}', to: '\u{acf}', mapping: Disallowed },
+ Range { from: '\u{ad0}', to: '\u{ad0}', mapping: Valid },
+ Range { from: '\u{ad1}', to: '\u{adf}', mapping: Disallowed },
+ Range { from: '\u{ae0}', to: '\u{ae3}', mapping: Valid },
+ Range { from: '\u{ae4}', to: '\u{ae5}', mapping: Disallowed },
+ Range { from: '\u{ae6}', to: '\u{af1}', mapping: Valid },
+ Range { from: '\u{af2}', to: '\u{af8}', mapping: Disallowed },
+ Range { from: '\u{af9}', to: '\u{af9}', mapping: Valid },
+ Range { from: '\u{afa}', to: '\u{b00}', mapping: Disallowed },
+ Range { from: '\u{b01}', to: '\u{b03}', mapping: Valid },
+ Range { from: '\u{b04}', to: '\u{b04}', mapping: Disallowed },
+ Range { from: '\u{b05}', to: '\u{b0c}', mapping: Valid },
+ Range { from: '\u{b0d}', to: '\u{b0e}', mapping: Disallowed },
+ Range { from: '\u{b0f}', to: '\u{b10}', mapping: Valid },
+ Range { from: '\u{b11}', to: '\u{b12}', mapping: Disallowed },
+ Range { from: '\u{b13}', to: '\u{b28}', mapping: Valid },
+ Range { from: '\u{b29}', to: '\u{b29}', mapping: Disallowed },
+ Range { from: '\u{b2a}', to: '\u{b30}', mapping: Valid },
+ Range { from: '\u{b31}', to: '\u{b31}', mapping: Disallowed },
+ Range { from: '\u{b32}', to: '\u{b33}', mapping: Valid },
+ Range { from: '\u{b34}', to: '\u{b34}', mapping: Disallowed },
+ Range { from: '\u{b35}', to: '\u{b39}', mapping: Valid },
+ Range { from: '\u{b3a}', to: '\u{b3b}', mapping: Disallowed },
+ Range { from: '\u{b3c}', to: '\u{b44}', mapping: Valid },
+ Range { from: '\u{b45}', to: '\u{b46}', mapping: Disallowed },
+ Range { from: '\u{b47}', to: '\u{b48}', mapping: Valid },
+ Range { from: '\u{b49}', to: '\u{b4a}', mapping: Disallowed },
+ Range { from: '\u{b4b}', to: '\u{b4d}', mapping: Valid },
+ Range { from: '\u{b4e}', to: '\u{b55}', mapping: Disallowed },
+ Range { from: '\u{b56}', to: '\u{b57}', mapping: Valid },
+ Range { from: '\u{b58}', to: '\u{b5b}', mapping: Disallowed },
+ Range { from: '\u{b5c}', to: '\u{b5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{b5d}', to: '\u{b5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{b5e}', to: '\u{b5e}', mapping: Disallowed },
+ Range { from: '\u{b5f}', to: '\u{b63}', mapping: Valid },
+ Range { from: '\u{b64}', to: '\u{b65}', mapping: Disallowed },
+ Range { from: '\u{b66}', to: '\u{b77}', mapping: Valid },
+ Range { from: '\u{b78}', to: '\u{b81}', mapping: Disallowed },
+ Range { from: '\u{b82}', to: '\u{b83}', mapping: Valid },
+ Range { from: '\u{b84}', to: '\u{b84}', mapping: Disallowed },
+ Range { from: '\u{b85}', to: '\u{b8a}', mapping: Valid },
+ Range { from: '\u{b8b}', to: '\u{b8d}', mapping: Disallowed },
+ Range { from: '\u{b8e}', to: '\u{b90}', mapping: Valid },
+ Range { from: '\u{b91}', to: '\u{b91}', mapping: Disallowed },
+ Range { from: '\u{b92}', to: '\u{b95}', mapping: Valid },
+ Range { from: '\u{b96}', to: '\u{b98}', mapping: Disallowed },
+ Range { from: '\u{b99}', to: '\u{b9a}', mapping: Valid },
+ Range { from: '\u{b9b}', to: '\u{b9b}', mapping: Disallowed },
+ Range { from: '\u{b9c}', to: '\u{b9c}', mapping: Valid },
+ Range { from: '\u{b9d}', to: '\u{b9d}', mapping: Disallowed },
+ Range { from: '\u{b9e}', to: '\u{b9f}', mapping: Valid },
+ Range { from: '\u{ba0}', to: '\u{ba2}', mapping: Disallowed },
+ Range { from: '\u{ba3}', to: '\u{ba4}', mapping: Valid },
+ Range { from: '\u{ba5}', to: '\u{ba7}', mapping: Disallowed },
+ Range { from: '\u{ba8}', to: '\u{baa}', mapping: Valid },
+ Range { from: '\u{bab}', to: '\u{bad}', mapping: Disallowed },
+ Range { from: '\u{bae}', to: '\u{bb9}', mapping: Valid },
+ Range { from: '\u{bba}', to: '\u{bbd}', mapping: Disallowed },
+ Range { from: '\u{bbe}', to: '\u{bc2}', mapping: Valid },
+ Range { from: '\u{bc3}', to: '\u{bc5}', mapping: Disallowed },
+ Range { from: '\u{bc6}', to: '\u{bc8}', mapping: Valid },
+ Range { from: '\u{bc9}', to: '\u{bc9}', mapping: Disallowed },
+ Range { from: '\u{bca}', to: '\u{bcd}', mapping: Valid },
+ Range { from: '\u{bce}', to: '\u{bcf}', mapping: Disallowed },
+ Range { from: '\u{bd0}', to: '\u{bd0}', mapping: Valid },
+ Range { from: '\u{bd1}', to: '\u{bd6}', mapping: Disallowed },
+ Range { from: '\u{bd7}', to: '\u{bd7}', mapping: Valid },
+ Range { from: '\u{bd8}', to: '\u{be5}', mapping: Disallowed },
+ Range { from: '\u{be6}', to: '\u{bfa}', mapping: Valid },
+ Range { from: '\u{bfb}', to: '\u{bff}', mapping: Disallowed },
+ Range { from: '\u{c00}', to: '\u{c03}', mapping: Valid },
+ Range { from: '\u{c04}', to: '\u{c04}', mapping: Disallowed },
+ Range { from: '\u{c05}', to: '\u{c0c}', mapping: Valid },
+ Range { from: '\u{c0d}', to: '\u{c0d}', mapping: Disallowed },
+ Range { from: '\u{c0e}', to: '\u{c10}', mapping: Valid },
+ Range { from: '\u{c11}', to: '\u{c11}', mapping: Disallowed },
+ Range { from: '\u{c12}', to: '\u{c28}', mapping: Valid },
+ Range { from: '\u{c29}', to: '\u{c29}', mapping: Disallowed },
+ Range { from: '\u{c2a}', to: '\u{c39}', mapping: Valid },
+ Range { from: '\u{c3a}', to: '\u{c3c}', mapping: Disallowed },
+ Range { from: '\u{c3d}', to: '\u{c44}', mapping: Valid },
+ Range { from: '\u{c45}', to: '\u{c45}', mapping: Disallowed },
+ Range { from: '\u{c46}', to: '\u{c48}', mapping: Valid },
+ Range { from: '\u{c49}', to: '\u{c49}', mapping: Disallowed },
+ Range { from: '\u{c4a}', to: '\u{c4d}', mapping: Valid },
+ Range { from: '\u{c4e}', to: '\u{c54}', mapping: Disallowed },
+ Range { from: '\u{c55}', to: '\u{c56}', mapping: Valid },
+ Range { from: '\u{c57}', to: '\u{c57}', mapping: Disallowed },
+ Range { from: '\u{c58}', to: '\u{c5a}', mapping: Valid },
+ Range { from: '\u{c5b}', to: '\u{c5f}', mapping: Disallowed },
+ Range { from: '\u{c60}', to: '\u{c63}', mapping: Valid },
+ Range { from: '\u{c64}', to: '\u{c65}', mapping: Disallowed },
+ Range { from: '\u{c66}', to: '\u{c6f}', mapping: Valid },
+ Range { from: '\u{c70}', to: '\u{c77}', mapping: Disallowed },
+ Range { from: '\u{c78}', to: '\u{c83}', mapping: Valid },
+ Range { from: '\u{c84}', to: '\u{c84}', mapping: Disallowed },
+ Range { from: '\u{c85}', to: '\u{c8c}', mapping: Valid },
+ Range { from: '\u{c8d}', to: '\u{c8d}', mapping: Disallowed },
+ Range { from: '\u{c8e}', to: '\u{c90}', mapping: Valid },
+ Range { from: '\u{c91}', to: '\u{c91}', mapping: Disallowed },
+ Range { from: '\u{c92}', to: '\u{ca8}', mapping: Valid },
+ Range { from: '\u{ca9}', to: '\u{ca9}', mapping: Disallowed },
+ Range { from: '\u{caa}', to: '\u{cb3}', mapping: Valid },
+ Range { from: '\u{cb4}', to: '\u{cb4}', mapping: Disallowed },
+ Range { from: '\u{cb5}', to: '\u{cb9}', mapping: Valid },
+ Range { from: '\u{cba}', to: '\u{cbb}', mapping: Disallowed },
+ Range { from: '\u{cbc}', to: '\u{cc4}', mapping: Valid },
+ Range { from: '\u{cc5}', to: '\u{cc5}', mapping: Disallowed },
+ Range { from: '\u{cc6}', to: '\u{cc8}', mapping: Valid },
+ Range { from: '\u{cc9}', to: '\u{cc9}', mapping: Disallowed },
+ Range { from: '\u{cca}', to: '\u{ccd}', mapping: Valid },
+ Range { from: '\u{cce}', to: '\u{cd4}', mapping: Disallowed },
+ Range { from: '\u{cd5}', to: '\u{cd6}', mapping: Valid },
+ Range { from: '\u{cd7}', to: '\u{cdd}', mapping: Disallowed },
+ Range { from: '\u{cde}', to: '\u{cde}', mapping: Valid },
+ Range { from: '\u{cdf}', to: '\u{cdf}', mapping: Disallowed },
+ Range { from: '\u{ce0}', to: '\u{ce3}', mapping: Valid },
+ Range { from: '\u{ce4}', to: '\u{ce5}', mapping: Disallowed },
+ Range { from: '\u{ce6}', to: '\u{cef}', mapping: Valid },
+ Range { from: '\u{cf0}', to: '\u{cf0}', mapping: Disallowed },
+ Range { from: '\u{cf1}', to: '\u{cf2}', mapping: Valid },
+ Range { from: '\u{cf3}', to: '\u{d00}', mapping: Disallowed },
+ Range { from: '\u{d01}', to: '\u{d03}', mapping: Valid },
+ Range { from: '\u{d04}', to: '\u{d04}', mapping: Disallowed },
+ Range { from: '\u{d05}', to: '\u{d0c}', mapping: Valid },
+ Range { from: '\u{d0d}', to: '\u{d0d}', mapping: Disallowed },
+ Range { from: '\u{d0e}', to: '\u{d10}', mapping: Valid },
+ Range { from: '\u{d11}', to: '\u{d11}', mapping: Disallowed },
+ Range { from: '\u{d12}', to: '\u{d3a}', mapping: Valid },
+ Range { from: '\u{d3b}', to: '\u{d3c}', mapping: Disallowed },
+ Range { from: '\u{d3d}', to: '\u{d44}', mapping: Valid },
+ Range { from: '\u{d45}', to: '\u{d45}', mapping: Disallowed },
+ Range { from: '\u{d46}', to: '\u{d48}', mapping: Valid },
+ Range { from: '\u{d49}', to: '\u{d49}', mapping: Disallowed },
+ Range { from: '\u{d4a}', to: '\u{d4f}', mapping: Valid },
+ Range { from: '\u{d50}', to: '\u{d53}', mapping: Disallowed },
+ Range { from: '\u{d54}', to: '\u{d63}', mapping: Valid },
+ Range { from: '\u{d64}', to: '\u{d65}', mapping: Disallowed },
+ Range { from: '\u{d66}', to: '\u{d7f}', mapping: Valid },
+ Range { from: '\u{d80}', to: '\u{d81}', mapping: Disallowed },
+ Range { from: '\u{d82}', to: '\u{d83}', mapping: Valid },
+ Range { from: '\u{d84}', to: '\u{d84}', mapping: Disallowed },
+ Range { from: '\u{d85}', to: '\u{d96}', mapping: Valid },
+ Range { from: '\u{d97}', to: '\u{d99}', mapping: Disallowed },
+ Range { from: '\u{d9a}', to: '\u{db1}', mapping: Valid },
+ Range { from: '\u{db2}', to: '\u{db2}', mapping: Disallowed },
+ Range { from: '\u{db3}', to: '\u{dbb}', mapping: Valid },
+ Range { from: '\u{dbc}', to: '\u{dbc}', mapping: Disallowed },
+ Range { from: '\u{dbd}', to: '\u{dbd}', mapping: Valid },
+ Range { from: '\u{dbe}', to: '\u{dbf}', mapping: Disallowed },
+ Range { from: '\u{dc0}', to: '\u{dc6}', mapping: Valid },
+ Range { from: '\u{dc7}', to: '\u{dc9}', mapping: Disallowed },
+ Range { from: '\u{dca}', to: '\u{dca}', mapping: Valid },
+ Range { from: '\u{dcb}', to: '\u{dce}', mapping: Disallowed },
+ Range { from: '\u{dcf}', to: '\u{dd4}', mapping: Valid },
+ Range { from: '\u{dd5}', to: '\u{dd5}', mapping: Disallowed },
+ Range { from: '\u{dd6}', to: '\u{dd6}', mapping: Valid },
+ Range { from: '\u{dd7}', to: '\u{dd7}', mapping: Disallowed },
+ Range { from: '\u{dd8}', to: '\u{ddf}', mapping: Valid },
+ Range { from: '\u{de0}', to: '\u{de5}', mapping: Disallowed },
+ Range { from: '\u{de6}', to: '\u{def}', mapping: Valid },
+ Range { from: '\u{df0}', to: '\u{df1}', mapping: Disallowed },
+ Range { from: '\u{df2}', to: '\u{df4}', mapping: Valid },
+ Range { from: '\u{df5}', to: '\u{e00}', mapping: Disallowed },
+ Range { from: '\u{e01}', to: '\u{e32}', mapping: Valid },
+ Range { from: '\u{e33}', to: '\u{e33}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{e34}', to: '\u{e3a}', mapping: Valid },
+ Range { from: '\u{e3b}', to: '\u{e3e}', mapping: Disallowed },
+ Range { from: '\u{e3f}', to: '\u{e5b}', mapping: Valid },
+ Range { from: '\u{e5c}', to: '\u{e80}', mapping: Disallowed },
+ Range { from: '\u{e81}', to: '\u{e82}', mapping: Valid },
+ Range { from: '\u{e83}', to: '\u{e83}', mapping: Disallowed },
+ Range { from: '\u{e84}', to: '\u{e84}', mapping: Valid },
+ Range { from: '\u{e85}', to: '\u{e86}', mapping: Disallowed },
+ Range { from: '\u{e87}', to: '\u{e88}', mapping: Valid },
+ Range { from: '\u{e89}', to: '\u{e89}', mapping: Disallowed },
+ Range { from: '\u{e8a}', to: '\u{e8a}', mapping: Valid },
+ Range { from: '\u{e8b}', to: '\u{e8c}', mapping: Disallowed },
+ Range { from: '\u{e8d}', to: '\u{e8d}', mapping: Valid },
+ Range { from: '\u{e8e}', to: '\u{e93}', mapping: Disallowed },
+ Range { from: '\u{e94}', to: '\u{e97}', mapping: Valid },
+ Range { from: '\u{e98}', to: '\u{e98}', mapping: Disallowed },
+ Range { from: '\u{e99}', to: '\u{e9f}', mapping: Valid },
+ Range { from: '\u{ea0}', to: '\u{ea0}', mapping: Disallowed },
+ Range { from: '\u{ea1}', to: '\u{ea3}', mapping: Valid },
+ Range { from: '\u{ea4}', to: '\u{ea4}', mapping: Disallowed },
+ Range { from: '\u{ea5}', to: '\u{ea5}', mapping: Valid },
+ Range { from: '\u{ea6}', to: '\u{ea6}', mapping: Disallowed },
+ Range { from: '\u{ea7}', to: '\u{ea7}', mapping: Valid },
+ Range { from: '\u{ea8}', to: '\u{ea9}', mapping: Disallowed },
+ Range { from: '\u{eaa}', to: '\u{eab}', mapping: Valid },
+ Range { from: '\u{eac}', to: '\u{eac}', mapping: Disallowed },
+ Range { from: '\u{ead}', to: '\u{eb2}', mapping: Valid },
+ Range { from: '\u{eb3}', to: '\u{eb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{eb4}', to: '\u{eb9}', mapping: Valid },
+ Range { from: '\u{eba}', to: '\u{eba}', mapping: Disallowed },
+ Range { from: '\u{ebb}', to: '\u{ebd}', mapping: Valid },
+ Range { from: '\u{ebe}', to: '\u{ebf}', mapping: Disallowed },
+ Range { from: '\u{ec0}', to: '\u{ec4}', mapping: Valid },
+ Range { from: '\u{ec5}', to: '\u{ec5}', mapping: Disallowed },
+ Range { from: '\u{ec6}', to: '\u{ec6}', mapping: Valid },
+ Range { from: '\u{ec7}', to: '\u{ec7}', mapping: Disallowed },
+ Range { from: '\u{ec8}', to: '\u{ecd}', mapping: Valid },
+ Range { from: '\u{ece}', to: '\u{ecf}', mapping: Disallowed },
+ Range { from: '\u{ed0}', to: '\u{ed9}', mapping: Valid },
+ Range { from: '\u{eda}', to: '\u{edb}', mapping: Disallowed },
+ Range { from: '\u{edc}', to: '\u{edc}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{edd}', to: '\u{edd}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{ede}', to: '\u{edf}', mapping: Valid },
+ Range { from: '\u{ee0}', to: '\u{eff}', mapping: Disallowed },
+ Range { from: '\u{f00}', to: '\u{f0b}', mapping: Valid },
+ Range { from: '\u{f0c}', to: '\u{f0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 4, byte_len: 3 }) },
+ Range { from: '\u{f0d}', to: '\u{f42}', mapping: Valid },
+ Range { from: '\u{f43}', to: '\u{f43}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f44}', to: '\u{f47}', mapping: Valid },
+ Range { from: '\u{f48}', to: '\u{f48}', mapping: Disallowed },
+ Range { from: '\u{f49}', to: '\u{f4c}', mapping: Valid },
+ Range { from: '\u{f4d}', to: '\u{f4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f4e}', to: '\u{f51}', mapping: Valid },
+ Range { from: '\u{f52}', to: '\u{f52}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f53}', to: '\u{f56}', mapping: Valid },
+ Range { from: '\u{f57}', to: '\u{f57}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f58}', to: '\u{f5b}', mapping: Valid },
+ Range { from: '\u{f5c}', to: '\u{f5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f5d}', to: '\u{f68}', mapping: Valid },
+ Range { from: '\u{f69}', to: '\u{f69}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f6a}', to: '\u{f6c}', mapping: Valid },
+ Range { from: '\u{f6d}', to: '\u{f70}', mapping: Disallowed },
+ Range { from: '\u{f71}', to: '\u{f72}', mapping: Valid },
+ Range { from: '\u{f73}', to: '\u{f73}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f74}', to: '\u{f74}', mapping: Valid },
+ Range { from: '\u{f75}', to: '\u{f75}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f76}', to: '\u{f76}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 4, byte_len: 6 }) },
+ Range { from: '\u{f77}', to: '\u{f77}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 4,
byte_len: 9 }) }, + Range { from: '\u{f78}', to: '\u{f78}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 4, byte_len: 6 }) }, + Range { from: '\u{f79}', to: '\u{f79}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 4, byte_len: 9 }) }, + Range { from: '\u{f7a}', to: '\u{f80}', mapping: Valid }, + Range { from: '\u{f81}', to: '\u{f81}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 4, byte_len: 6 }) }, + Range { from: '\u{f82}', to: '\u{f92}', mapping: Valid }, + Range { from: '\u{f93}', to: '\u{f93}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 4, byte_len: 6 }) }, + Range { from: '\u{f94}', to: '\u{f97}', mapping: Valid }, + Range { from: '\u{f98}', to: '\u{f98}', mapping: Disallowed }, + Range { from: '\u{f99}', to: '\u{f9c}', mapping: Valid }, + Range { from: '\u{f9d}', to: '\u{f9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 4, byte_len: 6 }) }, + Range { from: '\u{f9e}', to: '\u{fa1}', mapping: Valid }, + Range { from: '\u{fa2}', to: '\u{fa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 4, byte_len: 6 }) }, + Range { from: '\u{fa3}', to: '\u{fa6}', mapping: Valid }, + Range { from: '\u{fa7}', to: '\u{fa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 4, byte_len: 6 }) }, + Range { from: '\u{fa8}', to: '\u{fab}', mapping: Valid }, + Range { from: '\u{fac}', to: '\u{fac}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 4, byte_len: 6 }) }, + Range { from: '\u{fad}', to: '\u{fb8}', mapping: Valid }, + Range { from: '\u{fb9}', to: '\u{fb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 4, byte_len: 6 }) }, + Range { from: '\u{fba}', to: '\u{fbc}', mapping: Valid }, + Range { from: '\u{fbd}', to: '\u{fbd}', mapping: Disallowed }, + Range { from: '\u{fbe}', to: '\u{fcc}', mapping: Valid }, + Range { from: '\u{fcd}', to: '\u{fcd}', mapping: Disallowed }, + Range { from: '\u{fce}', to: '\u{fda}', mapping: Valid }, + Range { from: '\u{fdb}', to: '\u{fff}', mapping: Disallowed }, + Range { from: '\u{1000}', to: '\u{109f}', mapping: Valid }, + Range { from: '\u{10a0}', to: '\u{10c6}', mapping: Disallowed }, + Range { from: '\u{10c7}', to: '\u{10c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 4, byte_len: 3 }) }, + Range { from: '\u{10c8}', to: '\u{10cc}', mapping: Disallowed }, + Range { from: '\u{10cd}', to: '\u{10cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 4, byte_len: 3 }) }, + Range { from: '\u{10ce}', to: '\u{10cf}', mapping: Disallowed }, + Range { from: '\u{10d0}', to: '\u{10fb}', mapping: Valid }, + Range { from: '\u{10fc}', to: '\u{10fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 4, byte_len: 3 }) }, + Range { from: '\u{10fd}', to: '\u{115e}', mapping: Valid }, + Range { from: '\u{115f}', to: '\u{1160}', mapping: Disallowed }, + Range { from: '\u{1161}', to: '\u{1248}', mapping: Valid }, + Range { from: '\u{1249}', to: '\u{1249}', mapping: Disallowed }, + Range { from: '\u{124a}', to: '\u{124d}', mapping: Valid }, + Range { from: '\u{124e}', to: '\u{124f}', mapping: Disallowed }, + Range { from: '\u{1250}', to: '\u{1256}', mapping: Valid }, + Range { from: '\u{1257}', to: '\u{1257}', mapping: Disallowed }, + Range { from: '\u{1258}', to: '\u{1258}', mapping: Valid }, + Range { from: '\u{1259}', to: '\u{1259}', mapping: Disallowed }, + Range { from: '\u{125a}', to: '\u{125d}', 
mapping: Valid }, + Range { from: '\u{125e}', to: '\u{125f}', mapping: Disallowed }, + Range { from: '\u{1260}', to: '\u{1288}', mapping: Valid }, + Range { from: '\u{1289}', to: '\u{1289}', mapping: Disallowed }, + Range { from: '\u{128a}', to: '\u{128d}', mapping: Valid }, + Range { from: '\u{128e}', to: '\u{128f}', mapping: Disallowed }, + Range { from: '\u{1290}', to: '\u{12b0}', mapping: Valid }, + Range { from: '\u{12b1}', to: '\u{12b1}', mapping: Disallowed }, + Range { from: '\u{12b2}', to: '\u{12b5}', mapping: Valid }, + Range { from: '\u{12b6}', to: '\u{12b7}', mapping: Disallowed }, + Range { from: '\u{12b8}', to: '\u{12be}', mapping: Valid }, + Range { from: '\u{12bf}', to: '\u{12bf}', mapping: Disallowed }, + Range { from: '\u{12c0}', to: '\u{12c0}', mapping: Valid }, + Range { from: '\u{12c1}', to: '\u{12c1}', mapping: Disallowed }, + Range { from: '\u{12c2}', to: '\u{12c5}', mapping: Valid }, + Range { from: '\u{12c6}', to: '\u{12c7}', mapping: Disallowed }, + Range { from: '\u{12c8}', to: '\u{12d6}', mapping: Valid }, + Range { from: '\u{12d7}', to: '\u{12d7}', mapping: Disallowed }, + Range { from: '\u{12d8}', to: '\u{1310}', mapping: Valid }, + Range { from: '\u{1311}', to: '\u{1311}', mapping: Disallowed }, + Range { from: '\u{1312}', to: '\u{1315}', mapping: Valid }, + Range { from: '\u{1316}', to: '\u{1317}', mapping: Disallowed }, + Range { from: '\u{1318}', to: '\u{135a}', mapping: Valid }, + Range { from: '\u{135b}', to: '\u{135c}', mapping: Disallowed }, + Range { from: '\u{135d}', to: '\u{137c}', mapping: Valid }, + Range { from: '\u{137d}', to: '\u{137f}', mapping: Disallowed }, + Range { from: '\u{1380}', to: '\u{1399}', mapping: Valid }, + Range { from: '\u{139a}', to: '\u{139f}', mapping: Disallowed }, + Range { from: '\u{13a0}', to: '\u{13f5}', mapping: Valid }, + Range { from: '\u{13f6}', to: '\u{13f7}', mapping: Disallowed }, + Range { from: '\u{13f8}', to: '\u{13f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{13f9}', to: '\u{13f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{13fa}', to: '\u{13fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{13fb}', to: '\u{13fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{13fc}', to: '\u{13fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{13fd}', to: '\u{13fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{13fe}', to: '\u{13ff}', mapping: Disallowed }, + Range { from: '\u{1400}', to: '\u{167f}', mapping: Valid }, + Range { from: '\u{1680}', to: '\u{1680}', mapping: Disallowed }, + Range { from: '\u{1681}', to: '\u{169c}', mapping: Valid }, + Range { from: '\u{169d}', to: '\u{169f}', mapping: Disallowed }, + Range { from: '\u{16a0}', to: '\u{16f8}', mapping: Valid }, + Range { from: '\u{16f9}', to: '\u{16ff}', mapping: Disallowed }, + Range { from: '\u{1700}', to: '\u{170c}', mapping: Valid }, + Range { from: '\u{170d}', to: '\u{170d}', mapping: Disallowed }, + Range { from: '\u{170e}', to: '\u{1714}', mapping: Valid }, + Range { from: '\u{1715}', to: '\u{171f}', mapping: Disallowed }, + Range { from: '\u{1720}', to: '\u{1736}', mapping: Valid }, + Range { from: '\u{1737}', to: '\u{173f}', mapping: Disallowed }, + Range { from: 
'\u{1740}', to: '\u{1753}', mapping: Valid }, + Range { from: '\u{1754}', to: '\u{175f}', mapping: Disallowed }, + Range { from: '\u{1760}', to: '\u{176c}', mapping: Valid }, + Range { from: '\u{176d}', to: '\u{176d}', mapping: Disallowed }, + Range { from: '\u{176e}', to: '\u{1770}', mapping: Valid }, + Range { from: '\u{1771}', to: '\u{1771}', mapping: Disallowed }, + Range { from: '\u{1772}', to: '\u{1773}', mapping: Valid }, + Range { from: '\u{1774}', to: '\u{177f}', mapping: Disallowed }, + Range { from: '\u{1780}', to: '\u{17b3}', mapping: Valid }, + Range { from: '\u{17b4}', to: '\u{17b5}', mapping: Disallowed }, + Range { from: '\u{17b6}', to: '\u{17dd}', mapping: Valid }, + Range { from: '\u{17de}', to: '\u{17df}', mapping: Disallowed }, + Range { from: '\u{17e0}', to: '\u{17e9}', mapping: Valid }, + Range { from: '\u{17ea}', to: '\u{17ef}', mapping: Disallowed }, + Range { from: '\u{17f0}', to: '\u{17f9}', mapping: Valid }, + Range { from: '\u{17fa}', to: '\u{17ff}', mapping: Disallowed }, + Range { from: '\u{1800}', to: '\u{1805}', mapping: Valid }, + Range { from: '\u{1806}', to: '\u{1806}', mapping: Disallowed }, + Range { from: '\u{1807}', to: '\u{180a}', mapping: Valid }, + Range { from: '\u{180b}', to: '\u{180d}', mapping: Ignored }, + Range { from: '\u{180e}', to: '\u{180f}', mapping: Disallowed }, + Range { from: '\u{1810}', to: '\u{1819}', mapping: Valid }, + Range { from: '\u{181a}', to: '\u{181f}', mapping: Disallowed }, + Range { from: '\u{1820}', to: '\u{1877}', mapping: Valid }, + Range { from: '\u{1878}', to: '\u{187f}', mapping: Disallowed }, + Range { from: '\u{1880}', to: '\u{18aa}', mapping: Valid }, + Range { from: '\u{18ab}', to: '\u{18af}', mapping: Disallowed }, + Range { from: '\u{18b0}', to: '\u{18f5}', mapping: Valid }, + Range { from: '\u{18f6}', to: '\u{18ff}', mapping: Disallowed }, + Range { from: '\u{1900}', to: '\u{191e}', mapping: Valid }, + Range { from: '\u{191f}', to: '\u{191f}', mapping: Disallowed }, + Range { from: '\u{1920}', to: '\u{192b}', mapping: Valid }, + Range { from: '\u{192c}', to: '\u{192f}', mapping: Disallowed }, + Range { from: '\u{1930}', to: '\u{193b}', mapping: Valid }, + Range { from: '\u{193c}', to: '\u{193f}', mapping: Disallowed }, + Range { from: '\u{1940}', to: '\u{1940}', mapping: Valid }, + Range { from: '\u{1941}', to: '\u{1943}', mapping: Disallowed }, + Range { from: '\u{1944}', to: '\u{196d}', mapping: Valid }, + Range { from: '\u{196e}', to: '\u{196f}', mapping: Disallowed }, + Range { from: '\u{1970}', to: '\u{1974}', mapping: Valid }, + Range { from: '\u{1975}', to: '\u{197f}', mapping: Disallowed }, + Range { from: '\u{1980}', to: '\u{19ab}', mapping: Valid }, + Range { from: '\u{19ac}', to: '\u{19af}', mapping: Disallowed }, + Range { from: '\u{19b0}', to: '\u{19c9}', mapping: Valid }, + Range { from: '\u{19ca}', to: '\u{19cf}', mapping: Disallowed }, + Range { from: '\u{19d0}', to: '\u{19da}', mapping: Valid }, + Range { from: '\u{19db}', to: '\u{19dd}', mapping: Disallowed }, + Range { from: '\u{19de}', to: '\u{1a1b}', mapping: Valid }, + Range { from: '\u{1a1c}', to: '\u{1a1d}', mapping: Disallowed }, + Range { from: '\u{1a1e}', to: '\u{1a5e}', mapping: Valid }, + Range { from: '\u{1a5f}', to: '\u{1a5f}', mapping: Disallowed }, + Range { from: '\u{1a60}', to: '\u{1a7c}', mapping: Valid }, + Range { from: '\u{1a7d}', to: '\u{1a7e}', mapping: Disallowed }, + Range { from: '\u{1a7f}', to: '\u{1a89}', mapping: Valid }, + Range { from: '\u{1a8a}', to: '\u{1a8f}', mapping: Disallowed }, + Range { from: 
'\u{1a90}', to: '\u{1a99}', mapping: Valid }, + Range { from: '\u{1a9a}', to: '\u{1a9f}', mapping: Disallowed }, + Range { from: '\u{1aa0}', to: '\u{1aad}', mapping: Valid }, + Range { from: '\u{1aae}', to: '\u{1aaf}', mapping: Disallowed }, + Range { from: '\u{1ab0}', to: '\u{1abe}', mapping: Valid }, + Range { from: '\u{1abf}', to: '\u{1aff}', mapping: Disallowed }, + Range { from: '\u{1b00}', to: '\u{1b4b}', mapping: Valid }, + Range { from: '\u{1b4c}', to: '\u{1b4f}', mapping: Disallowed }, + Range { from: '\u{1b50}', to: '\u{1b7c}', mapping: Valid }, + Range { from: '\u{1b7d}', to: '\u{1b7f}', mapping: Disallowed }, + Range { from: '\u{1b80}', to: '\u{1bf3}', mapping: Valid }, + Range { from: '\u{1bf4}', to: '\u{1bfb}', mapping: Disallowed }, + Range { from: '\u{1bfc}', to: '\u{1c37}', mapping: Valid }, + Range { from: '\u{1c38}', to: '\u{1c3a}', mapping: Disallowed }, + Range { from: '\u{1c3b}', to: '\u{1c49}', mapping: Valid }, + Range { from: '\u{1c4a}', to: '\u{1c4c}', mapping: Disallowed }, + Range { from: '\u{1c4d}', to: '\u{1c7f}', mapping: Valid }, + Range { from: '\u{1c80}', to: '\u{1c80}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1c81}', to: '\u{1c81}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1c82}', to: '\u{1c82}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1c83}', to: '\u{1c83}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1c84}', to: '\u{1c85}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1c86}', to: '\u{1c86}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1c87}', to: '\u{1c87}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1c88}', to: '\u{1c88}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1c89}', to: '\u{1cbf}', mapping: Disallowed }, + Range { from: '\u{1cc0}', to: '\u{1cc7}', mapping: Valid }, + Range { from: '\u{1cc8}', to: '\u{1ccf}', mapping: Disallowed }, + Range { from: '\u{1cd0}', to: '\u{1cf6}', mapping: Valid }, + Range { from: '\u{1cf7}', to: '\u{1cf7}', mapping: Disallowed }, + Range { from: '\u{1cf8}', to: '\u{1cf9}', mapping: Valid }, + Range { from: '\u{1cfa}', to: '\u{1cff}', mapping: Disallowed }, + Range { from: '\u{1d00}', to: '\u{1d2b}', mapping: Valid }, + Range { from: '\u{1d2c}', to: '\u{1d2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d2d}', to: '\u{1d2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d2e}', to: '\u{1d2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d2f}', to: '\u{1d2f}', mapping: Valid }, + Range { from: '\u{1d30}', to: '\u{1d30}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d31}', to: '\u{1d31}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d32}', to: '\u{1d32}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: 
'\u{1d33}', to: '\u{1d33}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d34}', to: '\u{1d34}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d35}', to: '\u{1d35}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d36}', to: '\u{1d36}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d37}', to: '\u{1d37}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d38}', to: '\u{1d38}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d39}', to: '\u{1d39}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d3a}', to: '\u{1d3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d3b}', to: '\u{1d3b}', mapping: Valid }, + Range { from: '\u{1d3c}', to: '\u{1d3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d3d}', to: '\u{1d3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d3e}', to: '\u{1d3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d3f}', to: '\u{1d3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d40}', to: '\u{1d40}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d41}', to: '\u{1d41}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d42}', to: '\u{1d42}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d43}', to: '\u{1d43}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d44}', to: '\u{1d44}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1d45}', to: '\u{1d45}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1d46}', to: '\u{1d46}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1d47}', to: '\u{1d47}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d48}', to: '\u{1d48}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d49}', to: '\u{1d49}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d4a}', to: '\u{1d4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d4b}', to: '\u{1d4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d4c}', to: '\u{1d4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1d4d}', to: '\u{1d4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d4e}', to: 
'\u{1d4e}', mapping: Valid }, + Range { from: '\u{1d4f}', to: '\u{1d4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d50}', to: '\u{1d50}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d51}', to: '\u{1d51}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d52}', to: '\u{1d52}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d53}', to: '\u{1d53}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d54}', to: '\u{1d54}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1d55}', to: '\u{1d55}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1d56}', to: '\u{1d56}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d57}', to: '\u{1d57}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d58}', to: '\u{1d58}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d59}', to: '\u{1d59}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1d5a}', to: '\u{1d5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d5b}', to: '\u{1d5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d5c}', to: '\u{1d5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1d5d}', to: '\u{1d5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d5e}', to: '\u{1d5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d5f}', to: '\u{1d5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d60}', to: '\u{1d60}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d61}', to: '\u{1d61}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d62}', to: '\u{1d62}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d63}', to: '\u{1d63}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d64}', to: '\u{1d64}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d65}', to: '\u{1d65}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d66}', to: '\u{1d66}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d67}', to: '\u{1d67}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d68}', to: '\u{1d68}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d69}', to: 
'\u{1d69}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6a}', to: '\u{1d6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b}', to: '\u{1d77}', mapping: Valid }, + Range { from: '\u{1d78}', to: '\u{1d78}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d79}', to: '\u{1d9a}', mapping: Valid }, + Range { from: '\u{1d9b}', to: '\u{1d9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1d9c}', to: '\u{1d9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d9d}', to: '\u{1d9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1d9e}', to: '\u{1d9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d9f}', to: '\u{1d9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1da0}', to: '\u{1da0}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1da1}', to: '\u{1da1}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1da2}', to: '\u{1da2}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1da3}', to: '\u{1da3}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1da4}', to: '\u{1da4}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1da5}', to: '\u{1da5}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1da6}', to: '\u{1da6}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1da7}', to: '\u{1da7}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1da8}', to: '\u{1da8}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1da9}', to: '\u{1da9}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1daa}', to: '\u{1daa}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1dab}', to: '\u{1dab}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1dac}', to: '\u{1dac}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1dad}', to: '\u{1dad}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1dae}', to: '\u{1dae}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1daf}', to: '\u{1daf}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1db0}', to: '\u{1db0}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1db1}', to: '\u{1db1}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 
1, byte_len: 2 }) }, + Range { from: '\u{1db2}', to: '\u{1db2}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1db3}', to: '\u{1db3}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1db4}', to: '\u{1db4}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1db5}', to: '\u{1db5}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1db6}', to: '\u{1db6}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1db7}', to: '\u{1db7}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1db8}', to: '\u{1db8}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1db9}', to: '\u{1db9}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1dba}', to: '\u{1dba}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1dbb}', to: '\u{1dbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1dbc}', to: '\u{1dbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1dbd}', to: '\u{1dbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{1dbe}', to: '\u{1dbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1dbf}', to: '\u{1dbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1dc0}', to: '\u{1df5}', mapping: Valid }, + Range { from: '\u{1df6}', to: '\u{1dfa}', mapping: Disallowed }, + Range { from: '\u{1dfb}', to: '\u{1dff}', mapping: Valid }, + Range { from: '\u{1e00}', to: '\u{1e00}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e01}', to: '\u{1e01}', mapping: Valid }, + Range { from: '\u{1e02}', to: '\u{1e02}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e03}', to: '\u{1e03}', mapping: Valid }, + Range { from: '\u{1e04}', to: '\u{1e04}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e05}', to: '\u{1e05}', mapping: Valid }, + Range { from: '\u{1e06}', to: '\u{1e06}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e07}', to: '\u{1e07}', mapping: Valid }, + Range { from: '\u{1e08}', to: '\u{1e08}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e09}', to: '\u{1e09}', mapping: Valid }, + Range { from: '\u{1e0a}', to: '\u{1e0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e0b}', to: '\u{1e0b}', mapping: Valid }, + Range { from: '\u{1e0c}', to: '\u{1e0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e0d}', to: '\u{1e0d}', mapping: Valid }, + Range { from: '\u{1e0e}', to: '\u{1e0e}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 108, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e0f}', to: '\u{1e0f}', mapping: Valid }, + Range { from: '\u{1e10}', to: '\u{1e10}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e11}', to: '\u{1e11}', mapping: Valid }, + Range { from: '\u{1e12}', to: '\u{1e12}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e13}', to: '\u{1e13}', mapping: Valid }, + Range { from: '\u{1e14}', to: '\u{1e14}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e15}', to: '\u{1e15}', mapping: Valid }, + Range { from: '\u{1e16}', to: '\u{1e16}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e17}', to: '\u{1e17}', mapping: Valid }, + Range { from: '\u{1e18}', to: '\u{1e18}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e19}', to: '\u{1e19}', mapping: Valid }, + Range { from: '\u{1e1a}', to: '\u{1e1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e1b}', to: '\u{1e1b}', mapping: Valid }, + Range { from: '\u{1e1c}', to: '\u{1e1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e1d}', to: '\u{1e1d}', mapping: Valid }, + Range { from: '\u{1e1e}', to: '\u{1e1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e1f}', to: '\u{1e1f}', mapping: Valid }, + Range { from: '\u{1e20}', to: '\u{1e20}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e21}', to: '\u{1e21}', mapping: Valid }, + Range { from: '\u{1e22}', to: '\u{1e22}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e23}', to: '\u{1e23}', mapping: Valid }, + Range { from: '\u{1e24}', to: '\u{1e24}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e25}', to: '\u{1e25}', mapping: Valid }, + Range { from: '\u{1e26}', to: '\u{1e26}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e27}', to: '\u{1e27}', mapping: Valid }, + Range { from: '\u{1e28}', to: '\u{1e28}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e29}', to: '\u{1e29}', mapping: Valid }, + Range { from: '\u{1e2a}', to: '\u{1e2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e2b}', to: '\u{1e2b}', mapping: Valid }, + Range { from: '\u{1e2c}', to: '\u{1e2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e2d}', to: '\u{1e2d}', mapping: Valid }, + Range { from: '\u{1e2e}', to: '\u{1e2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e2f}', to: '\u{1e2f}', mapping: Valid }, + Range { from: '\u{1e30}', to: '\u{1e30}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e31}', to: '\u{1e31}', mapping: Valid }, + Range { from: '\u{1e32}', to: '\u{1e32}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 162, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e33}', to: '\u{1e33}', mapping: Valid }, + Range { from: '\u{1e34}', to: '\u{1e34}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e35}', to: '\u{1e35}', mapping: Valid }, + Range { from: '\u{1e36}', to: '\u{1e36}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e37}', to: '\u{1e37}', mapping: Valid }, + Range { from: '\u{1e38}', to: '\u{1e38}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e39}', to: '\u{1e39}', mapping: Valid }, + Range { from: '\u{1e3a}', to: '\u{1e3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e3b}', to: '\u{1e3b}', mapping: Valid }, + Range { from: '\u{1e3c}', to: '\u{1e3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e3d}', to: '\u{1e3d}', mapping: Valid }, + Range { from: '\u{1e3e}', to: '\u{1e3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e3f}', to: '\u{1e3f}', mapping: Valid }, + Range { from: '\u{1e40}', to: '\u{1e40}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e41}', to: '\u{1e41}', mapping: Valid }, + Range { from: '\u{1e42}', to: '\u{1e42}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e43}', to: '\u{1e43}', mapping: Valid }, + Range { from: '\u{1e44}', to: '\u{1e44}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e45}', to: '\u{1e45}', mapping: Valid }, + Range { from: '\u{1e46}', to: '\u{1e46}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e47}', to: '\u{1e47}', mapping: Valid }, + Range { from: '\u{1e48}', to: '\u{1e48}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e49}', to: '\u{1e49}', mapping: Valid }, + Range { from: '\u{1e4a}', to: '\u{1e4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e4b}', to: '\u{1e4b}', mapping: Valid }, + Range { from: '\u{1e4c}', to: '\u{1e4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e4d}', to: '\u{1e4d}', mapping: Valid }, + Range { from: '\u{1e4e}', to: '\u{1e4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e4f}', to: '\u{1e4f}', mapping: Valid }, + Range { from: '\u{1e50}', to: '\u{1e50}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e51}', to: '\u{1e51}', mapping: Valid }, + Range { from: '\u{1e52}', to: '\u{1e52}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e53}', to: '\u{1e53}', mapping: Valid }, + Range { from: '\u{1e54}', to: '\u{1e54}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e55}', to: '\u{1e55}', mapping: Valid }, + Range { from: '\u{1e56}', to: '\u{1e56}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 216, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e57}', to: '\u{1e57}', mapping: Valid }, + Range { from: '\u{1e58}', to: '\u{1e58}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e59}', to: '\u{1e59}', mapping: Valid }, + Range { from: '\u{1e5a}', to: '\u{1e5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e5b}', to: '\u{1e5b}', mapping: Valid }, + Range { from: '\u{1e5c}', to: '\u{1e5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e5d}', to: '\u{1e5d}', mapping: Valid }, + Range { from: '\u{1e5e}', to: '\u{1e5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e5f}', to: '\u{1e5f}', mapping: Valid }, + Range { from: '\u{1e60}', to: '\u{1e60}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e61}', to: '\u{1e61}', mapping: Valid }, + Range { from: '\u{1e62}', to: '\u{1e62}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e63}', to: '\u{1e63}', mapping: Valid }, + Range { from: '\u{1e64}', to: '\u{1e64}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e65}', to: '\u{1e65}', mapping: Valid }, + Range { from: '\u{1e66}', to: '\u{1e66}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e67}', to: '\u{1e67}', mapping: Valid }, + Range { from: '\u{1e68}', to: '\u{1e68}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e69}', to: '\u{1e69}', mapping: Valid }, + Range { from: '\u{1e6a}', to: '\u{1e6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e6b}', to: '\u{1e6b}', mapping: Valid }, + Range { from: '\u{1e6c}', to: '\u{1e6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e6d}', to: '\u{1e6d}', mapping: Valid }, + Range { from: '\u{1e6e}', to: '\u{1e6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e6f}', to: '\u{1e6f}', mapping: Valid }, + Range { from: '\u{1e70}', to: '\u{1e70}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e71}', to: '\u{1e71}', mapping: Valid }, + Range { from: '\u{1e72}', to: '\u{1e72}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e73}', to: '\u{1e73}', mapping: Valid }, + Range { from: '\u{1e74}', to: '\u{1e74}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e75}', to: '\u{1e75}', mapping: Valid }, + Range { from: '\u{1e76}', to: '\u{1e76}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e77}', to: '\u{1e77}', mapping: Valid }, + Range { from: '\u{1e78}', to: '\u{1e78}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e79}', to: '\u{1e79}', mapping: Valid }, + Range { from: '\u{1e7a}', to: '\u{1e7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 
14, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e7b}', to: '\u{1e7b}', mapping: Valid }, + Range { from: '\u{1e7c}', to: '\u{1e7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e7d}', to: '\u{1e7d}', mapping: Valid }, + Range { from: '\u{1e7e}', to: '\u{1e7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e7f}', to: '\u{1e7f}', mapping: Valid }, + Range { from: '\u{1e80}', to: '\u{1e80}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e81}', to: '\u{1e81}', mapping: Valid }, + Range { from: '\u{1e82}', to: '\u{1e82}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e83}', to: '\u{1e83}', mapping: Valid }, + Range { from: '\u{1e84}', to: '\u{1e84}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e85}', to: '\u{1e85}', mapping: Valid }, + Range { from: '\u{1e86}', to: '\u{1e86}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e87}', to: '\u{1e87}', mapping: Valid }, + Range { from: '\u{1e88}', to: '\u{1e88}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e89}', to: '\u{1e89}', mapping: Valid }, + Range { from: '\u{1e8a}', to: '\u{1e8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e8b}', to: '\u{1e8b}', mapping: Valid }, + Range { from: '\u{1e8c}', to: '\u{1e8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e8d}', to: '\u{1e8d}', mapping: Valid }, + Range { from: '\u{1e8e}', to: '\u{1e8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e8f}', to: '\u{1e8f}', mapping: Valid }, + Range { from: '\u{1e90}', to: '\u{1e90}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e91}', to: '\u{1e91}', mapping: Valid }, + Range { from: '\u{1e92}', to: '\u{1e92}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e93}', to: '\u{1e93}', mapping: Valid }, + Range { from: '\u{1e94}', to: '\u{1e94}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e95}', to: '\u{1e99}', mapping: Valid }, + Range { from: '\u{1e9a}', to: '\u{1e9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1e9b}', to: '\u{1e9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 5, byte_len: 3 }) }, + Range { from: '\u{1e9c}', to: '\u{1e9d}', mapping: Valid }, + Range { from: '\u{1e9e}', to: '\u{1e9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1e9f}', to: '\u{1e9f}', mapping: Valid }, + Range { from: '\u{1ea0}', to: '\u{1ea0}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ea1}', to: '\u{1ea1}', mapping: Valid }, + Range { from: '\u{1ea2}', to: '\u{1ea2}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ea3}', to: '\u{1ea3}', 
mapping: Valid }, + Range { from: '\u{1ea4}', to: '\u{1ea4}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ea5}', to: '\u{1ea5}', mapping: Valid }, + Range { from: '\u{1ea6}', to: '\u{1ea6}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ea7}', to: '\u{1ea7}', mapping: Valid }, + Range { from: '\u{1ea8}', to: '\u{1ea8}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ea9}', to: '\u{1ea9}', mapping: Valid }, + Range { from: '\u{1eaa}', to: '\u{1eaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1eab}', to: '\u{1eab}', mapping: Valid }, + Range { from: '\u{1eac}', to: '\u{1eac}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ead}', to: '\u{1ead}', mapping: Valid }, + Range { from: '\u{1eae}', to: '\u{1eae}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1eaf}', to: '\u{1eaf}', mapping: Valid }, + Range { from: '\u{1eb0}', to: '\u{1eb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1eb1}', to: '\u{1eb1}', mapping: Valid }, + Range { from: '\u{1eb2}', to: '\u{1eb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1eb3}', to: '\u{1eb3}', mapping: Valid }, + Range { from: '\u{1eb4}', to: '\u{1eb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1eb5}', to: '\u{1eb5}', mapping: Valid }, + Range { from: '\u{1eb6}', to: '\u{1eb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1eb7}', to: '\u{1eb7}', mapping: Valid }, + Range { from: '\u{1eb8}', to: '\u{1eb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1eb9}', to: '\u{1eb9}', mapping: Valid }, + Range { from: '\u{1eba}', to: '\u{1eba}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ebb}', to: '\u{1ebb}', mapping: Valid }, + Range { from: '\u{1ebc}', to: '\u{1ebc}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ebd}', to: '\u{1ebd}', mapping: Valid }, + Range { from: '\u{1ebe}', to: '\u{1ebe}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ebf}', to: '\u{1ebf}', mapping: Valid }, + Range { from: '\u{1ec0}', to: '\u{1ec0}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ec1}', to: '\u{1ec1}', mapping: Valid }, + Range { from: '\u{1ec2}', to: '\u{1ec2}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ec3}', to: '\u{1ec3}', mapping: Valid }, + Range { from: '\u{1ec4}', to: '\u{1ec4}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ec5}', to: '\u{1ec5}', mapping: Valid }, + Range { from: '\u{1ec6}', to: '\u{1ec6}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 6, byte_len: 3 }) }, + Range { from: '\u{1ec7}', to: '\u{1ec7}', mapping: Valid }, + 
Range { from: '\u{1ec8}', to: '\u{1ec8}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ec9}', to: '\u{1ec9}', mapping: Valid },
+ Range { from: '\u{1eca}', to: '\u{1eca}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ecb}', to: '\u{1ecb}', mapping: Valid },
+ Range { from: '\u{1ecc}', to: '\u{1ecc}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ecd}', to: '\u{1ecd}', mapping: Valid },
+ Range { from: '\u{1ece}', to: '\u{1ece}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ecf}', to: '\u{1ecf}', mapping: Valid },
+ Range { from: '\u{1ed0}', to: '\u{1ed0}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ed1}', to: '\u{1ed1}', mapping: Valid },
+ Range { from: '\u{1ed2}', to: '\u{1ed2}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ed3}', to: '\u{1ed3}', mapping: Valid },
+ Range { from: '\u{1ed4}', to: '\u{1ed4}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ed5}', to: '\u{1ed5}', mapping: Valid },
+ Range { from: '\u{1ed6}', to: '\u{1ed6}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ed7}', to: '\u{1ed7}', mapping: Valid },
+ Range { from: '\u{1ed8}', to: '\u{1ed8}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ed9}', to: '\u{1ed9}', mapping: Valid },
+ Range { from: '\u{1eda}', to: '\u{1eda}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1edb}', to: '\u{1edb}', mapping: Valid },
+ Range { from: '\u{1edc}', to: '\u{1edc}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1edd}', to: '\u{1edd}', mapping: Valid },
+ Range { from: '\u{1ede}', to: '\u{1ede}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1edf}', to: '\u{1edf}', mapping: Valid },
+ Range { from: '\u{1ee0}', to: '\u{1ee0}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ee1}', to: '\u{1ee1}', mapping: Valid },
+ Range { from: '\u{1ee2}', to: '\u{1ee2}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ee3}', to: '\u{1ee3}', mapping: Valid },
+ Range { from: '\u{1ee4}', to: '\u{1ee4}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ee5}', to: '\u{1ee5}', mapping: Valid },
+ Range { from: '\u{1ee6}', to: '\u{1ee6}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ee7}', to: '\u{1ee7}', mapping: Valid },
+ Range { from: '\u{1ee8}', to: '\u{1ee8}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ee9}', to: '\u{1ee9}', mapping: Valid },
+ Range { from: '\u{1eea}', to: '\u{1eea}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1eeb}', to: '\u{1eeb}', mapping: Valid },
+ Range { from: '\u{1eec}', to: '\u{1eec}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1eed}', to: '\u{1eed}', mapping: Valid },
+ Range { from: '\u{1eee}', to: '\u{1eee}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1eef}', to: '\u{1eef}', mapping: Valid },
+ Range { from: '\u{1ef0}', to: '\u{1ef0}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ef1}', to: '\u{1ef1}', mapping: Valid },
+ Range { from: '\u{1ef2}', to: '\u{1ef2}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ef3}', to: '\u{1ef3}', mapping: Valid },
+ Range { from: '\u{1ef4}', to: '\u{1ef4}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ef5}', to: '\u{1ef5}', mapping: Valid },
+ Range { from: '\u{1ef6}', to: '\u{1ef6}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ef7}', to: '\u{1ef7}', mapping: Valid },
+ Range { from: '\u{1ef8}', to: '\u{1ef8}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1ef9}', to: '\u{1ef9}', mapping: Valid },
+ Range { from: '\u{1efa}', to: '\u{1efa}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1efb}', to: '\u{1efb}', mapping: Valid },
+ Range { from: '\u{1efc}', to: '\u{1efc}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1efd}', to: '\u{1efd}', mapping: Valid },
+ Range { from: '\u{1efe}', to: '\u{1efe}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1eff}', to: '\u{1f07}', mapping: Valid },
+ Range { from: '\u{1f08}', to: '\u{1f08}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f09}', to: '\u{1f09}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f0a}', to: '\u{1f0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f0b}', to: '\u{1f0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f0c}', to: '\u{1f0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f0d}', to: '\u{1f0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f0e}', to: '\u{1f0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f0f}', to: '\u{1f0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f10}', to: '\u{1f15}', mapping: Valid },
+ Range { from: '\u{1f16}', to: '\u{1f17}', mapping: Disallowed },
+ Range { from: '\u{1f18}', to: '\u{1f18}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f19}', to: '\u{1f19}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f1a}', to: '\u{1f1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f1b}', to: '\u{1f1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f1c}', to: '\u{1f1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f1d}', to: '\u{1f1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f1e}', to: '\u{1f1f}', mapping: Disallowed },
+ Range { from: '\u{1f20}', to: '\u{1f27}', mapping: Valid },
+ Range { from: '\u{1f28}', to: '\u{1f28}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f29}', to: '\u{1f29}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f2a}', to: '\u{1f2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f2b}', to: '\u{1f2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 6, byte_len: 3 }) },
+ Range { from: '\u{1f2c}', to: '\u{1f2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f2d}', to: '\u{1f2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f2e}', to: '\u{1f2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f2f}', to: '\u{1f2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f30}', to: '\u{1f37}', mapping: Valid },
+ Range { from: '\u{1f38}', to: '\u{1f38}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f39}', to: '\u{1f39}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f3a}', to: '\u{1f3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f3b}', to: '\u{1f3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f3c}', to: '\u{1f3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f3d}', to: '\u{1f3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f3e}', to: '\u{1f3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f3f}', to: '\u{1f3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f40}', to: '\u{1f45}', mapping: Valid },
+ Range { from: '\u{1f46}', to: '\u{1f47}', mapping: Disallowed },
+ Range { from: '\u{1f48}', to: '\u{1f48}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f49}', to: '\u{1f49}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f4a}', to: '\u{1f4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f4b}', to: '\u{1f4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f4c}', to: '\u{1f4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f4d}', to: '\u{1f4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f4e}', to: '\u{1f4f}', mapping: Disallowed },
+ Range { from: '\u{1f50}', to: '\u{1f57}', mapping: Valid },
+ Range { from: '\u{1f58}', to: '\u{1f58}', mapping: Disallowed },
+ Range { from: '\u{1f59}', to: '\u{1f59}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f5a}', to: '\u{1f5a}', mapping: Disallowed },
+ Range { from: '\u{1f5b}', to: '\u{1f5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f5c}', to: '\u{1f5c}', mapping: Disallowed },
+ Range { from: '\u{1f5d}', to: '\u{1f5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f5e}', to: '\u{1f5e}', mapping: Disallowed },
+ Range { from: '\u{1f5f}', to: '\u{1f5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f60}', to: '\u{1f67}', mapping: Valid },
+ Range { from: '\u{1f68}', to: '\u{1f68}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f69}', to: '\u{1f69}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f6a}', to: '\u{1f6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f6b}', to: '\u{1f6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f6c}', to: '\u{1f6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f6d}', to: '\u{1f6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f6e}', to: '\u{1f6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f6f}', to: '\u{1f6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1f70}', to: '\u{1f70}', mapping: Valid },
+ Range { from: '\u{1f71}', to: '\u{1f71}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1f72}', to: '\u{1f72}', mapping: Valid },
+ Range { from: '\u{1f73}', to: '\u{1f73}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1f74}', to: '\u{1f74}', mapping: Valid },
+ Range { from: '\u{1f75}', to: '\u{1f75}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1f76}', to: '\u{1f76}', mapping: Valid },
+ Range { from: '\u{1f77}', to: '\u{1f77}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1f78}', to: '\u{1f78}', mapping: Valid },
+ Range { from: '\u{1f79}', to: '\u{1f79}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1f7a}', to: '\u{1f7a}', mapping: Valid },
+ Range { from: '\u{1f7b}', to: '\u{1f7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1f7c}', to: '\u{1f7c}', mapping: Valid },
+ Range { from: '\u{1f7d}', to: '\u{1f7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1f7e}', to: '\u{1f7f}', mapping: Disallowed },
+ Range { from: '\u{1f80}', to: '\u{1f80}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f81}', to: '\u{1f81}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f82}', to: '\u{1f82}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f83}', to: '\u{1f83}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f84}', to: '\u{1f84}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f85}', to: '\u{1f85}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f86}', to: '\u{1f86}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f87}', to: '\u{1f87}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f88}', to: '\u{1f88}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f89}', to: '\u{1f89}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f8a}', to: '\u{1f8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f8b}', to: '\u{1f8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f8c}', to: '\u{1f8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f8d}', to: '\u{1f8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f8e}', to: '\u{1f8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f8f}', to: '\u{1f8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f90}', to: '\u{1f90}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f91}', to: '\u{1f91}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f92}', to: '\u{1f92}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f93}', to: '\u{1f93}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f94}', to: '\u{1f94}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f95}', to: '\u{1f95}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f96}', to: '\u{1f96}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f97}', to: '\u{1f97}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f98}', to: '\u{1f98}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f99}', to: '\u{1f99}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f9a}', to: '\u{1f9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f9b}', to: '\u{1f9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f9c}', to: '\u{1f9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f9d}', to: '\u{1f9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f9e}', to: '\u{1f9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1f9f}', to: '\u{1f9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa0}', to: '\u{1fa0}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa1}', to: '\u{1fa1}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa2}', to: '\u{1fa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa3}', to: '\u{1fa3}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa4}', to: '\u{1fa4}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa5}', to: '\u{1fa5}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa6}', to: '\u{1fa6}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa7}', to: '\u{1fa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa8}', to: '\u{1fa8}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fa9}', to: '\u{1fa9}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1faa}', to: '\u{1faa}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fab}', to: '\u{1fab}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fac}', to: '\u{1fac}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fad}', to: '\u{1fad}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fae}', to: '\u{1fae}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1faf}', to: '\u{1faf}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fb0}', to: '\u{1fb1}', mapping: Valid },
+ Range { from: '\u{1fb2}', to: '\u{1fb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fb3}', to: '\u{1fb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 7, byte_len: 4 }) },
+ Range { from: '\u{1fb4}', to: '\u{1fb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 7, byte_len: 4 }) },
+ Range { from: '\u{1fb5}', to: '\u{1fb5}', mapping: Disallowed },
+ Range { from: '\u{1fb6}', to: '\u{1fb6}', mapping: Valid },
+ Range { from: '\u{1fb7}', to: '\u{1fb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fb8}', to: '\u{1fb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1fb9}', to: '\u{1fb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1fba}', to: '\u{1fba}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1fbb}', to: '\u{1fbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1fbc}', to: '\u{1fbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 7, byte_len: 4 }) },
+ Range { from: '\u{1fbd}', to: '\u{1fbd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1fbe}', to: '\u{1fbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{1fbf}', to: '\u{1fbf}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1fc0}', to: '\u{1fc0}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 7, byte_len: 3 }) },
+ Range { from: '\u{1fc1}', to: '\u{1fc1}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fc2}', to: '\u{1fc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 7, byte_len: 5 }) },
+ Range { from: '\u{1fc3}', to: '\u{1fc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 7, byte_len: 4 }) },
+ Range { from: '\u{1fc4}', to: '\u{1fc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 8, byte_len: 4 }) },
+ Range { from: '\u{1fc5}', to: '\u{1fc5}', mapping: Disallowed },
+ Range { from: '\u{1fc6}', to: '\u{1fc6}', mapping: Valid },
+ Range { from: '\u{1fc7}', to: '\u{1fc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1fc8}', to: '\u{1fc8}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fc9}', to: '\u{1fc9}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1fca}', to: '\u{1fca}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fcb}', to: '\u{1fcb}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1fcc}', to: '\u{1fcc}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 7, byte_len: 4 }) },
+ Range { from: '\u{1fcd}', to: '\u{1fcd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1fce}', to: '\u{1fce}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1fcf}', to: '\u{1fcf}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1fd0}', to: '\u{1fd2}', mapping: Valid },
+ Range { from: '\u{1fd3}', to: '\u{1fd3}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{1fd4}', to: '\u{1fd5}', mapping: Disallowed },
+ Range { from: '\u{1fd6}', to: '\u{1fd7}', mapping: Valid },
+ Range { from: '\u{1fd8}', to: '\u{1fd8}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fd9}', to: '\u{1fd9}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fda}', to: '\u{1fda}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fdb}', to: '\u{1fdb}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1fdc}', to: '\u{1fdc}', mapping: Disallowed },
+ Range { from: '\u{1fdd}', to: '\u{1fdd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1fde}', to: '\u{1fde}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1fdf}', to: '\u{1fdf}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1fe0}', to: '\u{1fe2}', mapping: Valid },
+ Range { from: '\u{1fe3}', to: '\u{1fe3}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{1fe4}', to: '\u{1fe7}', mapping: Valid },
+ Range { from: '\u{1fe8}', to: '\u{1fe8}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fe9}', to: '\u{1fe9}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fea}', to: '\u{1fea}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1feb}', to: '\u{1feb}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1fec}', to: '\u{1fec}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fed}', to: '\u{1fed}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1fee}', to: '\u{1fee}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 2, byte_len: 5 }) },
+ Range { from: '\u{1fef}', to: '\u{1fef}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{1ff0}', to: '\u{1ff1}', mapping: Disallowed },
+ Range { from: '\u{1ff2}', to: '\u{1ff2}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1ff3}', to: '\u{1ff3}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 8, byte_len: 4 }) },
+ Range { from: '\u{1ff4}', to: '\u{1ff4}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 8, byte_len: 4 }) },
+ Range { from: '\u{1ff5}', to: '\u{1ff5}', mapping: Disallowed },
+ Range { from: '\u{1ff6}', to: '\u{1ff6}', mapping: Valid },
+ Range { from: '\u{1ff7}', to: '\u{1ff7}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{1ff8}', to: '\u{1ff8}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1ff9}', to: '\u{1ff9}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1ffa}', to: '\u{1ffa}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1ffb}', to: '\u{1ffb}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{1ffc}', to: '\u{1ffc}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 8, byte_len: 4 }) },
+ Range { from: '\u{1ffd}', to: '\u{1ffd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }) },
+ Range { from: '\u{1ffe}', to: '\u{1ffe}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{1fff}', to: '\u{1fff}', mapping: Disallowed },
+ Range { from: '\u{2000}', to: '\u{200a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{200b}', to: '\u{200b}', mapping: Ignored },
+ Range { from: '\u{200c}', to: '\u{200d}', mapping: Deviation(StringTableSlice { byte_start_lo: 105, byte_start_hi: 8, byte_len: 0 }) },
+ Range { from: '\u{200e}', to: '\u{200f}', mapping: Disallowed },
+ Range { from: '\u{2010}', to: '\u{2010}', mapping: Valid },
+ Range { from: '\u{2011}', to: '\u{2011}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2012}', to: '\u{2016}', mapping: Valid },
+ Range { from: '\u{2017}', to: '\u{2017}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2018}', to: '\u{2023}', mapping: Valid },
+ Range { from: '\u{2024}', to: '\u{2026}', mapping: Disallowed },
+ Range { from: '\u{2027}', to: '\u{2027}', mapping: Valid },
+ Range { from: '\u{2028}', to: '\u{202e}', mapping: Disallowed },
+ Range { from: '\u{202f}', to: '\u{202f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2030}', to: '\u{2032}', mapping: Valid },
+ Range { from: '\u{2033}', to: '\u{2033}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 8, byte_len: 6 }) },
+ Range { from: '\u{2034}', to: '\u{2034}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 8, byte_len: 9 }) },
+ Range { from: '\u{2035}', to: '\u{2035}', mapping: Valid },
+ Range { from: '\u{2036}', to: '\u{2036}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 8, byte_len: 6 }) },
+ Range { from: '\u{2037}', to: '\u{2037}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 8, byte_len: 9 }) },
+ Range { from: '\u{2038}', to: '\u{203b}', mapping: Valid },
+ Range { from: '\u{203c}', to: '\u{203c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{203d}', to: '\u{203d}', mapping: Valid },
+ Range { from: '\u{203e}', to: '\u{203e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{203f}', to: '\u{2046}', mapping: Valid },
+ Range { from: '\u{2047}', to: '\u{2047}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2048}', to: '\u{2048}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2049}', to: '\u{2049}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{204a}', to: '\u{2056}', mapping: Valid },
+ Range { from: '\u{2057}', to: '\u{2057}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 8, byte_len: 12 }) },
+ Range { from: '\u{2058}', to: '\u{205e}', mapping: Valid },
+ Range { from: '\u{205f}', to: '\u{205f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2060}', to: '\u{2060}', mapping: Ignored },
+ Range { from: '\u{2061}', to: '\u{2063}', mapping: Disallowed },
+ Range { from: '\u{2064}', to: '\u{2064}', mapping: Ignored },
+ Range { from: '\u{2065}', to: '\u{206f}', mapping: Disallowed },
+ Range { from: '\u{2070}', to: '\u{2070}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2071}', to: '\u{2071}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2072}', to: '\u{2073}', mapping: Disallowed },
+ Range { from: '\u{2074}', to: '\u{2074}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2075}', to: '\u{2075}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2076}', to: '\u{2076}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2077}', to: '\u{2077}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2078}', to: '\u{2078}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2079}', to: '\u{2079}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{207a}', to: '\u{207a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{207b}', to: '\u{207b}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{207c}', to: '\u{207c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{207d}', to: '\u{207d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{207e}', to: '\u{207e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{207f}', to: '\u{207f}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2080}', to: '\u{2080}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2081}', to: '\u{2081}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2082}', to: '\u{2082}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2083}', to: '\u{2083}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2084}', to: '\u{2084}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2085}', to: '\u{2085}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2086}', to: '\u{2086}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2087}', to: '\u{2087}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2088}', to: '\u{2088}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2089}', to: '\u{2089}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{208a}', to: '\u{208a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{208b}', to: '\u{208b}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{208c}', to: '\u{208c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{208d}', to: '\u{208d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{208e}', to: '\u{208e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{208f}', to: '\u{208f}', mapping: Disallowed },
+ Range { from: '\u{2090}', to: '\u{2090}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2091}', to: '\u{2091}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2092}', to: '\u{2092}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2093}', to: '\u{2093}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2094}', to: '\u{2094}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{2095}', to: '\u{2095}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2096}', to: '\u{2096}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2097}', to: '\u{2097}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2098}', to: '\u{2098}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2099}', to: '\u{2099}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{209a}', to: '\u{209a}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{209b}', to: '\u{209b}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{209c}', to: '\u{209c}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{209d}', to: '\u{209f}', mapping: Disallowed },
+ Range { from: '\u{20a0}', to: '\u{20a7}', mapping: Valid },
+ Range { from: '\u{20a8}', to: '\u{20a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{20a9}', to: '\u{20be}', mapping: Valid },
+ Range { from: '\u{20bf}', to: '\u{20cf}', mapping: Disallowed },
+ Range { from: '\u{20d0}', to: '\u{20f0}', mapping: Valid },
+ Range { from: '\u{20f1}', to: '\u{20ff}', mapping: Disallowed },
+ Range { from: '\u{2100}', to: '\u{2100}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2101}', to: '\u{2101}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2102}', to: '\u{2102}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2103}', to: '\u{2103}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2104}', to: '\u{2104}', mapping: Valid },
+ Range { from: '\u{2105}', to: '\u{2105}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2106}', to: '\u{2106}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2107}', to: '\u{2107}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{2108}', to: '\u{2108}', mapping: Valid },
+ Range { from: '\u{2109}', to: '\u{2109}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{210a}', to: '\u{210a}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{210b}', to: '\u{210e}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{210f}', to: '\u{210f}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }) },
+ Range { from: '\u{2110}', to: '\u{2111}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2112}', to: '\u{2113}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2114}', to: '\u{2114}', mapping: Valid },
+ Range { from: '\u{2115}', to: '\u{2115}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2116}', to: '\u{2116}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2117}', to: '\u{2118}', mapping: Valid },
+ Range { from: '\u{2119}', to: '\u{2119}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{211a}', to: '\u{211a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{211b}', to: '\u{211d}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{211e}', to: '\u{211f}', mapping: Valid },
+ Range { from: '\u{2120}', to: '\u{2120}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2121}', to: '\u{2121}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2122}', to: '\u{2122}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2123}', to: '\u{2123}', mapping: Valid },
+ Range { from: '\u{2124}', to: '\u{2124}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2125}', to: '\u{2125}', mapping: Valid },
+ Range { from: '\u{2126}', to: '\u{2126}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{2127}', to: '\u{2127}', mapping: Valid },
+ Range { from: '\u{2128}', to: '\u{2128}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2129}', to: '\u{2129}', mapping: Valid },
+ Range { from: '\u{212a}', to: '\u{212a}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{212b}', to: '\u{212b}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 0, byte_len: 2 }) },
+ Range { from: '\u{212c}', to: '\u{212c}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{212d}', to: '\u{212d}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{212e}', to: '\u{212e}', mapping: Valid },
+ Range { from: '\u{212f}', to: '\u{2130}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2131}', to: '\u{2131}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2132}', to: '\u{2132}', mapping: Disallowed },
+ Range { from: '\u{2133}', to: '\u{2133}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2134}', to: '\u{2134}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2135}', to: '\u{2135}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2136}', to: '\u{2136}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2137}', to: '\u{2137}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2138}', to: '\u{2138}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 8, byte_len: 2 }) },
+ Range { from: '\u{2139}', to: '\u{2139}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{213a}', to: '\u{213a}', mapping: Valid },
+ Range { from: '\u{213b}', to: '\u{213b}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{213c}', to: '\u{213c}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{213d}', to: '\u{213e}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{213f}', to: '\u{213f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{2140}', to: '\u{2140}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{2141}', to: '\u{2144}', mapping: Valid },
+ Range { from: '\u{2145}', to: '\u{2146}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2147}', to: '\u{2147}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2148}', to: '\u{2148}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2149}', to: '\u{2149}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{214a}', to: '\u{214f}', mapping: Valid },
+ Range { from: '\u{2150}', to: '\u{2150}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{2151}', to: '\u{2151}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{2152}', to: '\u{2152}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 8, byte_len: 6 }) },
+ Range { from: '\u{2153}', to: '\u{2153}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{2154}', to: '\u{2154}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{2155}', to: '\u{2155}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{2156}', to: '\u{2156}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 8, byte_len: 5 }) },
+ Range { from: '\u{2157}', to: '\u{2157}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{2158}', to: '\u{2158}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{2159}', to: '\u{2159}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{215a}', to: '\u{215a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{215b}', to: '\u{215b}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{215c}', to: '\u{215c}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{215d}', to: '\u{215d}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{215e}', to: '\u{215e}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{215f}', to: '\u{215f}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2160}', to: '\u{2160}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2161}', to: '\u{2161}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2162}', to: '\u{2162}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2163}', to: '\u{2163}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2164}', to: '\u{2164}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2165}', to: '\u{2165}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2166}', to: '\u{2166}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2167}', to: '\u{2167}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2168}', to: '\u{2168}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2169}', to: '\u{2169}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{216a}', to: '\u{216a}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{216b}', to: '\u{216b}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{216c}', to: '\u{216c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{216d}', to: '\u{216d}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{216e}', to: '\u{216e}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{216f}', to: '\u{216f}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2170}', to: '\u{2170}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2171}', to: '\u{2171}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2172}', to: '\u{2172}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2173}', to: '\u{2173}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2174}', to: '\u{2174}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2175}', to: '\u{2175}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2176}', to: '\u{2176}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2177}', to: '\u{2177}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2178}', to: '\u{2178}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2179}', to: '\u{2179}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{217a}', to: '\u{217a}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{217b}', to: '\u{217b}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{217c}', to: '\u{217c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{217d}', to: '\u{217d}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{217e}', to: '\u{217e}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{217f}', to: '\u{217f}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2180}', to: '\u{2182}', mapping: Valid },
+ Range { from: '\u{2183}', to: '\u{2183}', mapping: Disallowed },
+ Range { from: '\u{2184}', to: '\u{2188}', mapping: Valid },
+ Range { from: '\u{2189}', to: '\u{2189}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 9, byte_len: 5 }) },
+ Range { from: '\u{218a}', to: '\u{218b}', mapping: Valid },
+ Range { from: '\u{218c}', to: '\u{218f}', mapping: Disallowed },
+ Range { from: '\u{2190}', to: '\u{222b}', mapping: Valid },
+ Range { from: '\u{222c}', to: '\u{222c}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 9, byte_len: 6 }) },
+ Range { from: '\u{222d}', to: '\u{222d}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 9, byte_len: 9 }) },
+ Range { from: '\u{222e}', to: '\u{222e}', mapping: Valid },
+ Range { from: '\u{222f}', to: '\u{222f}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 9, byte_len: 6 }) },
+ Range { from: '\u{2230}', to: '\u{2230}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 9, byte_len: 9 }) },
+ Range { from: '\u{2231}', to: '\u{225f}', mapping: Valid },
+ Range { from: '\u{2260}', to: '\u{2260}', mapping: DisallowedStd3Valid },
+ Range { from: '\u{2261}', to: '\u{226d}', mapping: Valid },
+ Range { from: '\u{226e}', to: '\u{226f}', mapping: DisallowedStd3Valid },
+ Range { from: '\u{2270}', to: '\u{2328}', mapping: Valid },
+ Range { from: '\u{2329}', to: '\u{2329}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{232a}', to: '\u{232a}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{232b}', to: '\u{23fe}', mapping: Valid },
+ Range { from: '\u{23ff}', to: '\u{23ff}', mapping: Disallowed },
+ Range { from: '\u{2400}', to: '\u{2426}', mapping: Valid },
+ Range { from: '\u{2427}', to: '\u{243f}', mapping: Disallowed },
+ Range { from: '\u{2440}', to: '\u{244a}', mapping: Valid },
+ Range { from: '\u{244b}', to: '\u{245f}', mapping: Disallowed },
+ Range { from: '\u{2460}', to: '\u{2460}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2461}', to: '\u{2461}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2462}', to: '\u{2462}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{2463}', to: '\u{2463}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2464}', to: '\u{2464}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2465}', to: '\u{2465}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2466}', to: '\u{2466}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2467}', to: '\u{2467}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2468}', to: '\u{2468}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{2469}', to: '\u{2469}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{246a}', to: '\u{246a}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{246b}', to: '\u{246b}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{246c}', to: '\u{246c}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{246d}', to: '\u{246d}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{246e}', to: '\u{246e}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{246f}', to: '\u{246f}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2470}', to: '\u{2470}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2471}', to: '\u{2471}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2472}', to: '\u{2472}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2473}', to: '\u{2473}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 9, byte_len: 2 }) },
+ Range { from: '\u{2474}', to: '\u{2474}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2475}', to: '\u{2475}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2476}', to: '\u{2476}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2477}', to: '\u{2477}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2478}', to: '\u{2478}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{2479}', to: '\u{2479}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{247a}', to: '\u{247a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{247b}', to: '\u{247b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{247c}', to: '\u{247c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{247d}', to: '\u{247d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{247e}', to: '\u{247e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{247f}', to: '\u{247f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2480}', to: '\u{2480}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2481}', to: '\u{2481}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2482}', to: '\u{2482}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2483}', to: '\u{2483}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2484}', to: '\u{2484}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2485}', to: '\u{2485}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2486}', to: '\u{2486}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2487}', to: '\u{2487}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 9, byte_len: 4 }) },
+ Range { from: '\u{2488}', to: '\u{249b}', mapping: Disallowed },
+ Range { from: '\u{249c}', to: '\u{249c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{249d}', to: '\u{249d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{249e}', to: '\u{249e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{249f}', to: '\u{249f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a0}', to: '\u{24a0}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a1}', to: '\u{24a1}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a2}', to: '\u{24a2}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a3}', to: '\u{24a3}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a4}', to: '\u{24a4}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a5}', to: '\u{24a5}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a6}', to: '\u{24a6}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a7}', to: '\u{24a7}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a8}', to: '\u{24a8}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24a9}', to: '\u{24a9}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24aa}', to: '\u{24aa}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24ab}', to: '\u{24ab}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24ac}', to: '\u{24ac}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24ad}', to: '\u{24ad}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{24ae}', to: '\u{24ae}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 10, byte_len: 3 }) },
+ Range { from: '\u{24af}', to: '\u{24af}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 10, byte_len: 3 }) },
+ Range { from: '\u{24b0}', to: '\u{24b0}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 10, byte_len: 3 }) },
+ Range { from: '\u{24b1}', to: '\u{24b1}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 10, byte_len: 3 }) },
+ Range { from: '\u{24b2}', to: '\u{24b2}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 10, byte_len: 3 }) },
+ Range { from: '\u{24b3}', to: '\u{24b3}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 10, byte_len: 3 }) },
+ Range { from: '\u{24b4}', to: '\u{24b4}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 10, byte_len: 3 }) },
+ Range { from: '\u{24b5}', to: '\u{24b5}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 10, byte_len: 3 }) },
+ Range { from: '\u{24b6}', to: '\u{24b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24b7}', to: '\u{24b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24b8}', to: '\u{24b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24b9}', to: '\u{24b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24ba}', to: '\u{24ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24bb}', to: '\u{24bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24bc}', to: '\u{24bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24bd}', to: '\u{24bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24be}', to: '\u{24be}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24bf}', to: '\u{24bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c0}', to: '\u{24c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c1}', to: '\u{24c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c2}', to: '\u{24c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c3}', to: '\u{24c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c4}', to: '\u{24c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c5}', to: '\u{24c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c6}', to: '\u{24c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c7}', to: '\u{24c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c8}', to: '\u{24c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24c9}', to: '\u{24c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24ca}', to: '\u{24ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24cb}', to: '\u{24cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24cc}', to: '\u{24cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24cd}', to: '\u{24cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24ce}', to: '\u{24ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24cf}', to: '\u{24cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d0}', to: '\u{24d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d1}', to: '\u{24d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d2}', to: '\u{24d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d3}', to: '\u{24d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d4}', to: '\u{24d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d5}', to: '\u{24d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d6}', to: '\u{24d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d7}', to: '\u{24d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d8}', to: '\u{24d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24d9}', to: '\u{24d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24da}', to: '\u{24da}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24db}', to: '\u{24db}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24dc}', to: '\u{24dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24dd}', to: '\u{24dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24de}', to: '\u{24de}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24df}', to: '\u{24df}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24e0}', to: '\u{24e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24e1}', to: '\u{24e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24e2}', to: '\u{24e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24e3}', to: '\u{24e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24e4}', to: '\u{24e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24e5}', to: '\u{24e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24e6}', to: '\u{24e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{24e7}', to: '\u{24e7}', mapping: Mapped(StringTableSlice
{ byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{24e8}', to: '\u{24e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{24e9}', to: '\u{24e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{24ea}', to: '\u{24ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, + Range { from: '\u{24eb}', to: '\u{2a0b}', mapping: Valid }, + Range { from: '\u{2a0c}', to: '\u{2a0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 10, byte_len: 12 }) }, + Range { from: '\u{2a0d}', to: '\u{2a73}', mapping: Valid }, + Range { from: '\u{2a74}', to: '\u{2a74}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2a75}', to: '\u{2a75}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 10, byte_len: 2 }) }, + Range { from: '\u{2a76}', to: '\u{2a76}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2a77}', to: '\u{2adb}', mapping: Valid }, + Range { from: '\u{2adc}', to: '\u{2adc}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 10, byte_len: 5 }) }, + Range { from: '\u{2add}', to: '\u{2b73}', mapping: Valid }, + Range { from: '\u{2b74}', to: '\u{2b75}', mapping: Disallowed }, + Range { from: '\u{2b76}', to: '\u{2b95}', mapping: Valid }, + Range { from: '\u{2b96}', to: '\u{2b97}', mapping: Disallowed }, + Range { from: '\u{2b98}', to: '\u{2bb9}', mapping: Valid }, + Range { from: '\u{2bba}', to: '\u{2bbc}', mapping: Disallowed }, + Range { from: '\u{2bbd}', to: '\u{2bc8}', mapping: Valid }, + Range { from: '\u{2bc9}', to: '\u{2bc9}', mapping: Disallowed }, + Range { from: '\u{2bca}', to: '\u{2bd1}', mapping: Valid }, + Range { from: '\u{2bd2}', to: '\u{2beb}', mapping: Disallowed }, + Range { from: '\u{2bec}', to: '\u{2bef}', mapping: Valid }, + Range { from: '\u{2bf0}', to: '\u{2bff}', mapping: Disallowed }, + Range { from: '\u{2c00}', to: '\u{2c00}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c01}', to: '\u{2c01}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c02}', to: '\u{2c02}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c03}', to: '\u{2c03}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c04}', to: '\u{2c04}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c05}', to: '\u{2c05}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c06}', to: '\u{2c06}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c07}', to: '\u{2c07}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c08}', to: '\u{2c08}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c09}', to: '\u{2c09}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c0a}', to: '\u{2c0a}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c0b}', to: '\u{2c0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c0c}', to: '\u{2c0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c0d}', to: '\u{2c0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c0e}', to: '\u{2c0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c0f}', to: '\u{2c0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c10}', to: '\u{2c10}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c11}', to: '\u{2c11}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c12}', to: '\u{2c12}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c13}', to: '\u{2c13}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c14}', to: '\u{2c14}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c15}', to: '\u{2c15}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c16}', to: '\u{2c16}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c17}', to: '\u{2c17}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c18}', to: '\u{2c18}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c19}', to: '\u{2c19}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c1a}', to: '\u{2c1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c1b}', to: '\u{2c1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c1c}', to: '\u{2c1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c1d}', to: '\u{2c1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c1e}', to: '\u{2c1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c1f}', to: '\u{2c1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c20}', to: '\u{2c20}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c21}', to: '\u{2c21}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c22}', to: '\u{2c22}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c23}', to: '\u{2c23}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c24}', to: '\u{2c24}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 157, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c25}', to: '\u{2c25}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c26}', to: '\u{2c26}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c27}', to: '\u{2c27}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c28}', to: '\u{2c28}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c29}', to: '\u{2c29}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c2a}', to: '\u{2c2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c2b}', to: '\u{2c2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c2c}', to: '\u{2c2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c2d}', to: '\u{2c2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c2e}', to: '\u{2c2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c2f}', to: '\u{2c2f}', mapping: Disallowed }, + Range { from: '\u{2c30}', to: '\u{2c5e}', mapping: Valid }, + Range { from: '\u{2c5f}', to: '\u{2c5f}', mapping: Disallowed }, + Range { from: '\u{2c60}', to: '\u{2c60}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c61}', to: '\u{2c61}', mapping: Valid }, + Range { from: '\u{2c62}', to: '\u{2c62}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 10, byte_len: 2 }) }, + Range { from: '\u{2c63}', to: '\u{2c63}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c64}', to: '\u{2c64}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 10, byte_len: 2 }) }, + Range { from: '\u{2c65}', to: '\u{2c66}', mapping: Valid }, + Range { from: '\u{2c67}', to: '\u{2c67}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c68}', to: '\u{2c68}', mapping: Valid }, + Range { from: '\u{2c69}', to: '\u{2c69}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c6a}', to: '\u{2c6a}', mapping: Valid }, + Range { from: '\u{2c6b}', to: '\u{2c6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c6c}', to: '\u{2c6c}', mapping: Valid }, + Range { from: '\u{2c6d}', to: '\u{2c6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{2c6e}', to: '\u{2c6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{2c6f}', to: '\u{2c6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{2c70}', to: '\u{2c70}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 5, byte_len: 2 }) }, + Range { from: '\u{2c71}', to: '\u{2c71}', mapping: Valid }, + Range { from: '\u{2c72}', to: '\u{2c72}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 209, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c73}', to: '\u{2c74}', mapping: Valid }, + Range { from: '\u{2c75}', to: '\u{2c75}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c76}', to: '\u{2c7b}', mapping: Valid }, + Range { from: '\u{2c7c}', to: '\u{2c7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{2c7d}', to: '\u{2c7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{2c7e}', to: '\u{2c7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 10, byte_len: 2 }) }, + Range { from: '\u{2c7f}', to: '\u{2c7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 10, byte_len: 2 }) }, + Range { from: '\u{2c80}', to: '\u{2c80}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c81}', to: '\u{2c81}', mapping: Valid }, + Range { from: '\u{2c82}', to: '\u{2c82}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c83}', to: '\u{2c83}', mapping: Valid }, + Range { from: '\u{2c84}', to: '\u{2c84}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c85}', to: '\u{2c85}', mapping: Valid }, + Range { from: '\u{2c86}', to: '\u{2c86}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c87}', to: '\u{2c87}', mapping: Valid }, + Range { from: '\u{2c88}', to: '\u{2c88}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c89}', to: '\u{2c89}', mapping: Valid }, + Range { from: '\u{2c8a}', to: '\u{2c8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c8b}', to: '\u{2c8b}', mapping: Valid }, + Range { from: '\u{2c8c}', to: '\u{2c8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c8d}', to: '\u{2c8d}', mapping: Valid }, + Range { from: '\u{2c8e}', to: '\u{2c8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c8f}', to: '\u{2c8f}', mapping: Valid }, + Range { from: '\u{2c90}', to: '\u{2c90}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c91}', to: '\u{2c91}', mapping: Valid }, + Range { from: '\u{2c92}', to: '\u{2c92}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c93}', to: '\u{2c93}', mapping: Valid }, + Range { from: '\u{2c94}', to: '\u{2c94}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c95}', to: '\u{2c95}', mapping: Valid }, + Range { from: '\u{2c96}', to: '\u{2c96}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c97}', to: '\u{2c97}', mapping: Valid }, + Range { from: '\u{2c98}', to: '\u{2c98}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 10, byte_len: 3 }) }, + Range { from: '\u{2c99}', to: '\u{2c99}', mapping: Valid }, + Range { from: '\u{2c9a}', to: '\u{2c9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 11, 
byte_len: 3 }) }, + Range { from: '\u{2c9b}', to: '\u{2c9b}', mapping: Valid }, + Range { from: '\u{2c9c}', to: '\u{2c9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2c9d}', to: '\u{2c9d}', mapping: Valid }, + Range { from: '\u{2c9e}', to: '\u{2c9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2c9f}', to: '\u{2c9f}', mapping: Valid }, + Range { from: '\u{2ca0}', to: '\u{2ca0}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ca1}', to: '\u{2ca1}', mapping: Valid }, + Range { from: '\u{2ca2}', to: '\u{2ca2}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ca3}', to: '\u{2ca3}', mapping: Valid }, + Range { from: '\u{2ca4}', to: '\u{2ca4}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ca5}', to: '\u{2ca5}', mapping: Valid }, + Range { from: '\u{2ca6}', to: '\u{2ca6}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ca7}', to: '\u{2ca7}', mapping: Valid }, + Range { from: '\u{2ca8}', to: '\u{2ca8}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ca9}', to: '\u{2ca9}', mapping: Valid }, + Range { from: '\u{2caa}', to: '\u{2caa}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cab}', to: '\u{2cab}', mapping: Valid }, + Range { from: '\u{2cac}', to: '\u{2cac}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cad}', to: '\u{2cad}', mapping: Valid }, + Range { from: '\u{2cae}', to: '\u{2cae}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2caf}', to: '\u{2caf}', mapping: Valid }, + Range { from: '\u{2cb0}', to: '\u{2cb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cb1}', to: '\u{2cb1}', mapping: Valid }, + Range { from: '\u{2cb2}', to: '\u{2cb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cb3}', to: '\u{2cb3}', mapping: Valid }, + Range { from: '\u{2cb4}', to: '\u{2cb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cb5}', to: '\u{2cb5}', mapping: Valid }, + Range { from: '\u{2cb6}', to: '\u{2cb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cb7}', to: '\u{2cb7}', mapping: Valid }, + Range { from: '\u{2cb8}', to: '\u{2cb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cb9}', to: '\u{2cb9}', mapping: Valid }, + Range { from: '\u{2cba}', to: '\u{2cba}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cbb}', to: '\u{2cbb}', mapping: Valid }, + Range { from: '\u{2cbc}', to: '\u{2cbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cbd}', to: '\u{2cbd}', mapping: Valid }, + Range { from: '\u{2cbe}', to: '\u{2cbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 11, byte_len: 
3 }) }, + Range { from: '\u{2cbf}', to: '\u{2cbf}', mapping: Valid }, + Range { from: '\u{2cc0}', to: '\u{2cc0}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cc1}', to: '\u{2cc1}', mapping: Valid }, + Range { from: '\u{2cc2}', to: '\u{2cc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cc3}', to: '\u{2cc3}', mapping: Valid }, + Range { from: '\u{2cc4}', to: '\u{2cc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cc5}', to: '\u{2cc5}', mapping: Valid }, + Range { from: '\u{2cc6}', to: '\u{2cc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cc7}', to: '\u{2cc7}', mapping: Valid }, + Range { from: '\u{2cc8}', to: '\u{2cc8}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cc9}', to: '\u{2cc9}', mapping: Valid }, + Range { from: '\u{2cca}', to: '\u{2cca}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ccb}', to: '\u{2ccb}', mapping: Valid }, + Range { from: '\u{2ccc}', to: '\u{2ccc}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ccd}', to: '\u{2ccd}', mapping: Valid }, + Range { from: '\u{2cce}', to: '\u{2cce}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ccf}', to: '\u{2ccf}', mapping: Valid }, + Range { from: '\u{2cd0}', to: '\u{2cd0}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cd1}', to: '\u{2cd1}', mapping: Valid }, + Range { from: '\u{2cd2}', to: '\u{2cd2}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cd3}', to: '\u{2cd3}', mapping: Valid }, + Range { from: '\u{2cd4}', to: '\u{2cd4}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cd5}', to: '\u{2cd5}', mapping: Valid }, + Range { from: '\u{2cd6}', to: '\u{2cd6}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cd7}', to: '\u{2cd7}', mapping: Valid }, + Range { from: '\u{2cd8}', to: '\u{2cd8}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cd9}', to: '\u{2cd9}', mapping: Valid }, + Range { from: '\u{2cda}', to: '\u{2cda}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cdb}', to: '\u{2cdb}', mapping: Valid }, + Range { from: '\u{2cdc}', to: '\u{2cdc}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cdd}', to: '\u{2cdd}', mapping: Valid }, + Range { from: '\u{2cde}', to: '\u{2cde}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cdf}', to: '\u{2cdf}', mapping: Valid }, + Range { from: '\u{2ce0}', to: '\u{2ce0}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ce1}', to: '\u{2ce1}', mapping: Valid }, + Range { from: '\u{2ce2}', to: '\u{2ce2}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 11, byte_len: 3 }) 
}, + Range { from: '\u{2ce3}', to: '\u{2cea}', mapping: Valid }, + Range { from: '\u{2ceb}', to: '\u{2ceb}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cec}', to: '\u{2cec}', mapping: Valid }, + Range { from: '\u{2ced}', to: '\u{2ced}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cee}', to: '\u{2cf1}', mapping: Valid }, + Range { from: '\u{2cf2}', to: '\u{2cf2}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2cf3}', to: '\u{2cf3}', mapping: Valid }, + Range { from: '\u{2cf4}', to: '\u{2cf8}', mapping: Disallowed }, + Range { from: '\u{2cf9}', to: '\u{2d25}', mapping: Valid }, + Range { from: '\u{2d26}', to: '\u{2d26}', mapping: Disallowed }, + Range { from: '\u{2d27}', to: '\u{2d27}', mapping: Valid }, + Range { from: '\u{2d28}', to: '\u{2d2c}', mapping: Disallowed }, + Range { from: '\u{2d2d}', to: '\u{2d2d}', mapping: Valid }, + Range { from: '\u{2d2e}', to: '\u{2d2f}', mapping: Disallowed }, + Range { from: '\u{2d30}', to: '\u{2d67}', mapping: Valid }, + Range { from: '\u{2d68}', to: '\u{2d6e}', mapping: Disallowed }, + Range { from: '\u{2d6f}', to: '\u{2d6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2d70}', to: '\u{2d70}', mapping: Valid }, + Range { from: '\u{2d71}', to: '\u{2d7e}', mapping: Disallowed }, + Range { from: '\u{2d7f}', to: '\u{2d96}', mapping: Valid }, + Range { from: '\u{2d97}', to: '\u{2d9f}', mapping: Disallowed }, + Range { from: '\u{2da0}', to: '\u{2da6}', mapping: Valid }, + Range { from: '\u{2da7}', to: '\u{2da7}', mapping: Disallowed }, + Range { from: '\u{2da8}', to: '\u{2dae}', mapping: Valid }, + Range { from: '\u{2daf}', to: '\u{2daf}', mapping: Disallowed }, + Range { from: '\u{2db0}', to: '\u{2db6}', mapping: Valid }, + Range { from: '\u{2db7}', to: '\u{2db7}', mapping: Disallowed }, + Range { from: '\u{2db8}', to: '\u{2dbe}', mapping: Valid }, + Range { from: '\u{2dbf}', to: '\u{2dbf}', mapping: Disallowed }, + Range { from: '\u{2dc0}', to: '\u{2dc6}', mapping: Valid }, + Range { from: '\u{2dc7}', to: '\u{2dc7}', mapping: Disallowed }, + Range { from: '\u{2dc8}', to: '\u{2dce}', mapping: Valid }, + Range { from: '\u{2dcf}', to: '\u{2dcf}', mapping: Disallowed }, + Range { from: '\u{2dd0}', to: '\u{2dd6}', mapping: Valid }, + Range { from: '\u{2dd7}', to: '\u{2dd7}', mapping: Disallowed }, + Range { from: '\u{2dd8}', to: '\u{2dde}', mapping: Valid }, + Range { from: '\u{2ddf}', to: '\u{2ddf}', mapping: Disallowed }, + Range { from: '\u{2de0}', to: '\u{2e44}', mapping: Valid }, + Range { from: '\u{2e45}', to: '\u{2e7f}', mapping: Disallowed }, + Range { from: '\u{2e80}', to: '\u{2e99}', mapping: Valid }, + Range { from: '\u{2e9a}', to: '\u{2e9a}', mapping: Disallowed }, + Range { from: '\u{2e9b}', to: '\u{2e9e}', mapping: Valid }, + Range { from: '\u{2e9f}', to: '\u{2e9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ea0}', to: '\u{2ef2}', mapping: Valid }, + Range { from: '\u{2ef3}', to: '\u{2ef3}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2ef4}', to: '\u{2eff}', mapping: Disallowed }, + Range { from: '\u{2f00}', to: '\u{2f00}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f01}', 
to: '\u{2f01}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f02}', to: '\u{2f02}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f03}', to: '\u{2f03}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f04}', to: '\u{2f04}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f05}', to: '\u{2f05}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f06}', to: '\u{2f06}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f07}', to: '\u{2f07}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f08}', to: '\u{2f08}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f09}', to: '\u{2f09}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f0a}', to: '\u{2f0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f0b}', to: '\u{2f0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f0c}', to: '\u{2f0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f0d}', to: '\u{2f0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f0e}', to: '\u{2f0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f0f}', to: '\u{2f0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f10}', to: '\u{2f10}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f11}', to: '\u{2f11}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f12}', to: '\u{2f12}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f13}', to: '\u{2f13}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f14}', to: '\u{2f14}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f15}', to: '\u{2f15}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f16}', to: '\u{2f16}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f17}', to: '\u{2f17}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f18}', to: '\u{2f18}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f19}', to: '\u{2f19}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f1a}', to: '\u{2f1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f1b}', to: '\u{2f1b}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f1c}', to: '\u{2f1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f1d}', to: '\u{2f1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f1e}', to: '\u{2f1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f1f}', to: '\u{2f1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f20}', to: '\u{2f20}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f21}', to: '\u{2f21}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f22}', to: '\u{2f22}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f23}', to: '\u{2f23}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f24}', to: '\u{2f24}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f25}', to: '\u{2f25}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f26}', to: '\u{2f26}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f27}', to: '\u{2f27}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f28}', to: '\u{2f28}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f29}', to: '\u{2f29}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{2f2a}', to: '\u{2f2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f2b}', to: '\u{2f2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f2c}', to: '\u{2f2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f2d}', to: '\u{2f2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f2e}', to: '\u{2f2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f2f}', to: '\u{2f2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f30}', to: '\u{2f30}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f31}', to: '\u{2f31}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f32}', to: '\u{2f32}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f33}', to: '\u{2f33}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f34}', to: '\u{2f34}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f35}', to: '\u{2f35}', mapping: Mapped(StringTableSlice 
{ byte_start_lo: 34, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f36}', to: '\u{2f36}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f37}', to: '\u{2f37}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f38}', to: '\u{2f38}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f39}', to: '\u{2f39}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f3a}', to: '\u{2f3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f3b}', to: '\u{2f3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f3c}', to: '\u{2f3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f3d}', to: '\u{2f3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f3e}', to: '\u{2f3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f3f}', to: '\u{2f3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f40}', to: '\u{2f40}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f41}', to: '\u{2f41}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f42}', to: '\u{2f42}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f43}', to: '\u{2f43}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f44}', to: '\u{2f44}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f45}', to: '\u{2f45}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f46}', to: '\u{2f46}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f47}', to: '\u{2f47}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f48}', to: '\u{2f48}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f49}', to: '\u{2f49}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f4a}', to: '\u{2f4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f4b}', to: '\u{2f4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f4c}', to: '\u{2f4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f4d}', to: '\u{2f4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f4e}', to: '\u{2f4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f4f}', to: '\u{2f4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 12, 
byte_len: 3 }) }, + Range { from: '\u{2f50}', to: '\u{2f50}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f51}', to: '\u{2f51}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f52}', to: '\u{2f52}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f53}', to: '\u{2f53}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f54}', to: '\u{2f54}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f55}', to: '\u{2f55}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f56}', to: '\u{2f56}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f57}', to: '\u{2f57}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f58}', to: '\u{2f58}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f59}', to: '\u{2f59}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f5a}', to: '\u{2f5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f5b}', to: '\u{2f5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f5c}', to: '\u{2f5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f5d}', to: '\u{2f5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f5e}', to: '\u{2f5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f5f}', to: '\u{2f5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f60}', to: '\u{2f60}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f61}', to: '\u{2f61}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f62}', to: '\u{2f62}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f63}', to: '\u{2f63}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f64}', to: '\u{2f64}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f65}', to: '\u{2f65}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f66}', to: '\u{2f66}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f67}', to: '\u{2f67}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f68}', to: '\u{2f68}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f69}', to: '\u{2f69}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 12, byte_len: 3 }) }, 
+ Range { from: '\u{2f6a}', to: '\u{2f6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f6b}', to: '\u{2f6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f6c}', to: '\u{2f6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f6d}', to: '\u{2f6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f6e}', to: '\u{2f6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f6f}', to: '\u{2f6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f70}', to: '\u{2f70}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f71}', to: '\u{2f71}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f72}', to: '\u{2f72}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f73}', to: '\u{2f73}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f74}', to: '\u{2f74}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f75}', to: '\u{2f75}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f76}', to: '\u{2f76}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f77}', to: '\u{2f77}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f78}', to: '\u{2f78}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f79}', to: '\u{2f79}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f7a}', to: '\u{2f7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f7b}', to: '\u{2f7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f7c}', to: '\u{2f7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f7d}', to: '\u{2f7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f7e}', to: '\u{2f7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f7f}', to: '\u{2f7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f80}', to: '\u{2f80}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f81}', to: '\u{2f81}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f82}', to: '\u{2f82}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f83}', to: '\u{2f83}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f84}', 
to: '\u{2f84}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f85}', to: '\u{2f85}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f86}', to: '\u{2f86}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f87}', to: '\u{2f87}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f88}', to: '\u{2f88}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f89}', to: '\u{2f89}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f8a}', to: '\u{2f8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f8b}', to: '\u{2f8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f8c}', to: '\u{2f8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f8d}', to: '\u{2f8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f8e}', to: '\u{2f8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f8f}', to: '\u{2f8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f90}', to: '\u{2f90}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f91}', to: '\u{2f91}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f92}', to: '\u{2f92}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f93}', to: '\u{2f93}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f94}', to: '\u{2f94}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f95}', to: '\u{2f95}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f96}', to: '\u{2f96}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f97}', to: '\u{2f97}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f98}', to: '\u{2f98}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f99}', to: '\u{2f99}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f9a}', to: '\u{2f9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f9b}', to: '\u{2f9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f9c}', to: '\u{2f9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f9d}', to: '\u{2f9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f9e}', to: '\u{2f9e}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f9f}', to: '\u{2f9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa0}', to: '\u{2fa0}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa1}', to: '\u{2fa1}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa2}', to: '\u{2fa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa3}', to: '\u{2fa3}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa4}', to: '\u{2fa4}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa5}', to: '\u{2fa5}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa6}', to: '\u{2fa6}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa7}', to: '\u{2fa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa8}', to: '\u{2fa8}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa9}', to: '\u{2fa9}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2faa}', to: '\u{2faa}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fab}', to: '\u{2fab}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fac}', to: '\u{2fac}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fad}', to: '\u{2fad}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fae}', to: '\u{2fae}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2faf}', to: '\u{2faf}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb0}', to: '\u{2fb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb1}', to: '\u{2fb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb2}', to: '\u{2fb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb3}', to: '\u{2fb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb4}', to: '\u{2fb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb5}', to: '\u{2fb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb6}', to: '\u{2fb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb7}', to: '\u{2fb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb8}', to: '\u{2fb8}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fb9}', to: '\u{2fb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fba}', to: '\u{2fba}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fbb}', to: '\u{2fbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fbc}', to: '\u{2fbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fbd}', to: '\u{2fbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fbe}', to: '\u{2fbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fbf}', to: '\u{2fbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc0}', to: '\u{2fc0}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc1}', to: '\u{2fc1}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc2}', to: '\u{2fc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc3}', to: '\u{2fc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc4}', to: '\u{2fc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc5}', to: '\u{2fc5}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc6}', to: '\u{2fc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc7}', to: '\u{2fc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc8}', to: '\u{2fc8}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fc9}', to: '\u{2fc9}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fca}', to: '\u{2fca}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fcb}', to: '\u{2fcb}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fcc}', to: '\u{2fcc}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fcd}', to: '\u{2fcd}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fce}', to: '\u{2fce}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fcf}', to: '\u{2fcf}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fd0}', to: '\u{2fd0}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fd1}', to: '\u{2fd1}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fd2}', to: '\u{2fd2}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fd3}', to: '\u{2fd3}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fd4}', to: '\u{2fd4}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fd5}', to: '\u{2fd5}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{2fd6}', to: '\u{2fff}', mapping: Disallowed }, + Range { from: '\u{3000}', to: '\u{3000}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{3001}', to: '\u{3001}', mapping: Valid }, + Range { from: '\u{3002}', to: '\u{3002}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 14, byte_len: 1 }) }, + Range { from: '\u{3003}', to: '\u{3035}', mapping: Valid }, + Range { from: '\u{3036}', to: '\u{3036}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3037}', to: '\u{3037}', mapping: Valid }, + Range { from: '\u{3038}', to: '\u{3038}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{3039}', to: '\u{3039}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{303a}', to: '\u{303a}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{303b}', to: '\u{303f}', mapping: Valid }, + Range { from: '\u{3040}', to: '\u{3040}', mapping: Disallowed }, + Range { from: '\u{3041}', to: '\u{3096}', mapping: Valid }, + Range { from: '\u{3097}', to: '\u{3098}', mapping: Disallowed }, + Range { from: '\u{3099}', to: '\u{309a}', mapping: Valid }, + Range { from: '\u{309b}', to: '\u{309b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 14, byte_len: 4 }) }, + Range { from: '\u{309c}', to: '\u{309c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 14, byte_len: 4 }) }, + Range { from: '\u{309d}', to: '\u{309e}', mapping: Valid }, + Range { from: '\u{309f}', to: '\u{309f}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 14, byte_len: 6 }) }, + Range { from: '\u{30a0}', to: '\u{30fe}', mapping: Valid }, + Range { from: '\u{30ff}', to: '\u{30ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 14, byte_len: 6 }) }, + Range { from: '\u{3100}', to: '\u{3104}', mapping: Disallowed }, + Range { from: '\u{3105}', to: '\u{312d}', mapping: Valid }, + Range { from: '\u{312e}', to: '\u{3130}', mapping: Disallowed }, + Range { from: '\u{3131}', to: '\u{3131}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3132}', to: '\u{3132}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3133}', to: '\u{3133}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3134}', to: '\u{3134}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3135}', to: '\u{3135}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3136}', to: '\u{3136}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, 
byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3137}', to: '\u{3137}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3138}', to: '\u{3138}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3139}', to: '\u{3139}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{313a}', to: '\u{313a}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{313b}', to: '\u{313b}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{313c}', to: '\u{313c}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{313d}', to: '\u{313d}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{313e}', to: '\u{313e}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{313f}', to: '\u{313f}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3140}', to: '\u{3140}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3141}', to: '\u{3141}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3142}', to: '\u{3142}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3143}', to: '\u{3143}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3144}', to: '\u{3144}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3145}', to: '\u{3145}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3146}', to: '\u{3146}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3147}', to: '\u{3147}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3148}', to: '\u{3148}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3149}', to: '\u{3149}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{314a}', to: '\u{314a}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{314b}', to: '\u{314b}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{314c}', to: '\u{314c}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{314d}', to: '\u{314d}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{314e}', to: '\u{314e}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{314f}', to: '\u{314f}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3150}', to: '\u{3150}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 14, byte_len: 3 }) 
}, + Range { from: '\u{3151}', to: '\u{3151}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3152}', to: '\u{3152}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3153}', to: '\u{3153}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3154}', to: '\u{3154}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3155}', to: '\u{3155}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3156}', to: '\u{3156}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3157}', to: '\u{3157}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3158}', to: '\u{3158}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3159}', to: '\u{3159}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{315a}', to: '\u{315a}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{315b}', to: '\u{315b}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{315c}', to: '\u{315c}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{315d}', to: '\u{315d}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{315e}', to: '\u{315e}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{315f}', to: '\u{315f}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3160}', to: '\u{3160}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3161}', to: '\u{3161}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3162}', to: '\u{3162}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3163}', to: '\u{3163}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3164}', to: '\u{3164}', mapping: Disallowed }, + Range { from: '\u{3165}', to: '\u{3165}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3166}', to: '\u{3166}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3167}', to: '\u{3167}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3168}', to: '\u{3168}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3169}', to: '\u{3169}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{316a}', to: '\u{316a}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{316b}', to: '\u{316b}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 206, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{316c}', to: '\u{316c}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{316d}', to: '\u{316d}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{316e}', to: '\u{316e}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{316f}', to: '\u{316f}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3170}', to: '\u{3170}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3171}', to: '\u{3171}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3172}', to: '\u{3172}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3173}', to: '\u{3173}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3174}', to: '\u{3174}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3175}', to: '\u{3175}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3176}', to: '\u{3176}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3177}', to: '\u{3177}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3178}', to: '\u{3178}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3179}', to: '\u{3179}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{317a}', to: '\u{317a}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{317b}', to: '\u{317b}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{317c}', to: '\u{317c}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{317d}', to: '\u{317d}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{317e}', to: '\u{317e}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{317f}', to: '\u{317f}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3180}', to: '\u{3180}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3181}', to: '\u{3181}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3182}', to: '\u{3182}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3183}', to: '\u{3183}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3184}', to: '\u{3184}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3185}', to: '\u{3185}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, 
byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3186}', to: '\u{3186}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3187}', to: '\u{3187}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3188}', to: '\u{3188}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3189}', to: '\u{3189}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{318a}', to: '\u{318a}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{318b}', to: '\u{318b}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{318c}', to: '\u{318c}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{318d}', to: '\u{318d}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{318e}', to: '\u{318e}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{318f}', to: '\u{318f}', mapping: Disallowed }, + Range { from: '\u{3190}', to: '\u{3191}', mapping: Valid }, + Range { from: '\u{3192}', to: '\u{3192}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{3193}', to: '\u{3193}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{3194}', to: '\u{3194}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3195}', to: '\u{3195}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3196}', to: '\u{3196}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3197}', to: '\u{3197}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3198}', to: '\u{3198}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3199}', to: '\u{3199}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{319a}', to: '\u{319a}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{319b}', to: '\u{319b}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{319c}', to: '\u{319c}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{319d}', to: '\u{319d}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{319e}', to: '\u{319e}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{319f}', to: '\u{319f}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{31a0}', to: '\u{31ba}', mapping: Valid }, + Range { from: '\u{31bb}', to: '\u{31bf}', mapping: Disallowed }, + Range { from: '\u{31c0}', to: '\u{31e3}', mapping: Valid }, + Range { from: '\u{31e4}', to: '\u{31ef}', mapping: Disallowed }, + Range { from: 
'\u{31f0}', to: '\u{31ff}', mapping: Valid }, + Range { from: '\u{3200}', to: '\u{3200}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3201}', to: '\u{3201}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3202}', to: '\u{3202}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3203}', to: '\u{3203}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3204}', to: '\u{3204}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3205}', to: '\u{3205}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3206}', to: '\u{3206}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3207}', to: '\u{3207}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3208}', to: '\u{3208}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3209}', to: '\u{3209}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{320a}', to: '\u{320a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{320b}', to: '\u{320b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{320c}', to: '\u{320c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{320d}', to: '\u{320d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{320e}', to: '\u{320e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{320f}', to: '\u{320f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3210}', to: '\u{3210}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3211}', to: '\u{3211}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3212}', to: '\u{3212}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3213}', to: '\u{3213}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3214}', to: '\u{3214}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3215}', to: '\u{3215}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3216}', to: '\u{3216}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3217}', to: '\u{3217}', mapping: 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3218}', to: '\u{3218}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3219}', to: '\u{3219}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{321a}', to: '\u{321a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{321b}', to: '\u{321b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{321c}', to: '\u{321c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{321d}', to: '\u{321d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 15, byte_len: 8 }) }, + Range { from: '\u{321e}', to: '\u{321e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 15, byte_len: 8 }) }, + Range { from: '\u{321f}', to: '\u{321f}', mapping: Disallowed }, + Range { from: '\u{3220}', to: '\u{3220}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3221}', to: '\u{3221}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 15, byte_len: 5 }) }, + Range { from: '\u{3222}', to: '\u{3222}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3223}', to: '\u{3223}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3224}', to: '\u{3224}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3225}', to: '\u{3225}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3226}', to: '\u{3226}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3227}', to: '\u{3227}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3228}', to: '\u{3228}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3229}', to: '\u{3229}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{322a}', to: '\u{322a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{322b}', to: '\u{322b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{322c}', to: '\u{322c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{322d}', to: '\u{322d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{322e}', to: '\u{322e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{322f}', to: '\u{322f}', mapping: DisallowedStd3Mapped(StringTableSlice { 
byte_start_lo: 68, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3230}', to: '\u{3230}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3231}', to: '\u{3231}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3232}', to: '\u{3232}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3233}', to: '\u{3233}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3234}', to: '\u{3234}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3235}', to: '\u{3235}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3236}', to: '\u{3236}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3237}', to: '\u{3237}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3238}', to: '\u{3238}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3239}', to: '\u{3239}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{323a}', to: '\u{323a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{323b}', to: '\u{323b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{323c}', to: '\u{323c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{323d}', to: '\u{323d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{323e}', to: '\u{323e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{323f}', to: '\u{323f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3240}', to: '\u{3240}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3241}', to: '\u{3241}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3242}', to: '\u{3242}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3243}', to: '\u{3243}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 16, byte_len: 5 }) }, + Range { from: '\u{3244}', to: '\u{3244}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3245}', to: '\u{3245}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3246}', to: '\u{3246}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{3247}', to: '\u{3247}', mapping: Mapped(StringTableSlice { byte_start_lo: 
179, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3248}', to: '\u{324f}', mapping: Valid }, + Range { from: '\u{3250}', to: '\u{3250}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3251}', to: '\u{3251}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3252}', to: '\u{3252}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3253}', to: '\u{3253}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3254}', to: '\u{3254}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3255}', to: '\u{3255}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3256}', to: '\u{3256}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3257}', to: '\u{3257}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3258}', to: '\u{3258}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3259}', to: '\u{3259}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{325a}', to: '\u{325a}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{325b}', to: '\u{325b}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{325c}', to: '\u{325c}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{325d}', to: '\u{325d}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{325e}', to: '\u{325e}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{325f}', to: '\u{325f}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 16, byte_len: 2 }) }, + Range { from: '\u{3260}', to: '\u{3260}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3261}', to: '\u{3261}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3262}', to: '\u{3262}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3263}', to: '\u{3263}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3264}', to: '\u{3264}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3265}', to: '\u{3265}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3266}', to: '\u{3266}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3267}', to: '\u{3267}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3268}', to: '\u{3268}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{3269}', to: '\u{3269}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{326a}', to: '\u{326a}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{326b}', to: '\u{326b}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{326c}', to: '\u{326c}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{326d}', to: '\u{326d}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }) }, + Range { from: '\u{326e}', to: '\u{326e}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{326f}', to: '\u{326f}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3270}', to: '\u{3270}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3271}', to: '\u{3271}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3272}', to: '\u{3272}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3273}', to: '\u{3273}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3274}', to: '\u{3274}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3275}', to: '\u{3275}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3276}', to: '\u{3276}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3277}', to: '\u{3277}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3278}', to: '\u{3278}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{3279}', to: '\u{3279}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{327a}', to: '\u{327a}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{327b}', to: '\u{327b}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 16, byte_len: 3 }) }, + Range { from: '\u{327c}', to: '\u{327c}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 17, byte_len: 6 }) }, + Range { from: '\u{327d}', to: '\u{327d}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 17, byte_len: 6 }) }, + Range { from: '\u{327e}', to: '\u{327e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{327f}', to: '\u{327f}', mapping: Valid }, + Range { from: '\u{3280}', to: '\u{3280}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{3281}', to: '\u{3281}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{3282}', to: '\u{3282}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{3283}', to: '\u{3283}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: 
'\u{3284}', to: '\u{3284}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3285}', to: '\u{3285}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3286}', to: '\u{3286}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3287}', to: '\u{3287}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{3288}', to: '\u{3288}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3289}', to: '\u{3289}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{328a}', to: '\u{328a}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{328b}', to: '\u{328b}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{328c}', to: '\u{328c}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{328d}', to: '\u{328d}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{328e}', to: '\u{328e}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{328f}', to: '\u{328f}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{3290}', to: '\u{3290}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{3291}', to: '\u{3291}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3292}', to: '\u{3292}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3293}', to: '\u{3293}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3294}', to: '\u{3294}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3295}', to: '\u{3295}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3296}', to: '\u{3296}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3297}', to: '\u{3297}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3298}', to: '\u{3298}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{3299}', to: '\u{3299}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{329a}', to: '\u{329a}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{329b}', to: '\u{329b}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{329c}', to: '\u{329c}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{329d}', to: '\u{329d}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{329e}', to: '\u{329e}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{329f}', to: '\u{329f}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32a0}', to: '\u{32a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32a1}', to: '\u{32a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32a2}', to: '\u{32a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32a3}', to: '\u{32a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32a4}', to: '\u{32a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{32a5}', to: '\u{32a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{32a6}', to: '\u{32a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 15, byte_len: 3 }) }, + Range { from: '\u{32a7}', to: '\u{32a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32a8}', to: '\u{32a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32a9}', to: '\u{32a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32aa}', to: '\u{32aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32ab}', to: '\u{32ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32ac}', to: '\u{32ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32ad}', to: '\u{32ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32ae}', to: '\u{32ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32af}', to: '\u{32af}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32b0}', to: '\u{32b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32b1}', to: '\u{32b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32b2}', to: '\u{32b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32b3}', to: '\u{32b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32b4}', to: '\u{32b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32b5}', to: '\u{32b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32b6}', to: '\u{32b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32b7}', to: '\u{32b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32b8}', to: '\u{32b8}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 126, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32b9}', to: '\u{32b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32ba}', to: '\u{32ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32bb}', to: '\u{32bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32bc}', to: '\u{32bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32bd}', to: '\u{32bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32be}', to: '\u{32be}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32bf}', to: '\u{32bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32c0}', to: '\u{32c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c1}', to: '\u{32c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c2}', to: '\u{32c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c3}', to: '\u{32c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c4}', to: '\u{32c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c5}', to: '\u{32c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c6}', to: '\u{32c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c7}', to: '\u{32c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c8}', to: '\u{32c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 17, byte_len: 4 }) }, + Range { from: '\u{32c9}', to: '\u{32c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 17, byte_len: 5 }) }, + Range { from: '\u{32ca}', to: '\u{32ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 17, byte_len: 5 }) }, + Range { from: '\u{32cb}', to: '\u{32cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 17, byte_len: 5 }) }, + Range { from: '\u{32cc}', to: '\u{32cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32cd}', to: '\u{32cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32ce}', to: '\u{32ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 17, byte_len: 2 }) }, + Range { from: '\u{32cf}', to: '\u{32cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d0}', to: '\u{32d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d1}', to: '\u{32d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d2}', to: '\u{32d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 
209, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d3}', to: '\u{32d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d4}', to: '\u{32d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d5}', to: '\u{32d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d6}', to: '\u{32d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d7}', to: '\u{32d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d8}', to: '\u{32d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32d9}', to: '\u{32d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32da}', to: '\u{32da}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32db}', to: '\u{32db}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32dc}', to: '\u{32dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32dd}', to: '\u{32dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32de}', to: '\u{32de}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32df}', to: '\u{32df}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32e0}', to: '\u{32e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32e1}', to: '\u{32e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{32e2}', to: '\u{32e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32e3}', to: '\u{32e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32e4}', to: '\u{32e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32e5}', to: '\u{32e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32e6}', to: '\u{32e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32e7}', to: '\u{32e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32e8}', to: '\u{32e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32e9}', to: '\u{32e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32ea}', to: '\u{32ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32eb}', to: '\u{32eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32ec}', to: '\u{32ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 18, 
byte_len: 3 }) }, + Range { from: '\u{32ed}', to: '\u{32ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32ee}', to: '\u{32ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32ef}', to: '\u{32ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f0}', to: '\u{32f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f1}', to: '\u{32f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f2}', to: '\u{32f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f3}', to: '\u{32f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f4}', to: '\u{32f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f5}', to: '\u{32f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f6}', to: '\u{32f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f7}', to: '\u{32f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f8}', to: '\u{32f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32f9}', to: '\u{32f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32fa}', to: '\u{32fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32fb}', to: '\u{32fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32fc}', to: '\u{32fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32fd}', to: '\u{32fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32fe}', to: '\u{32fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 18, byte_len: 3 }) }, + Range { from: '\u{32ff}', to: '\u{32ff}', mapping: Disallowed }, + Range { from: '\u{3300}', to: '\u{3300}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 18, byte_len: 12 }) }, + Range { from: '\u{3301}', to: '\u{3301}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 18, byte_len: 12 }) }, + Range { from: '\u{3302}', to: '\u{3302}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 18, byte_len: 12 }) }, + Range { from: '\u{3303}', to: '\u{3303}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 18, byte_len: 9 }) }, + Range { from: '\u{3304}', to: '\u{3304}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 18, byte_len: 12 }) }, + Range { from: '\u{3305}', to: '\u{3305}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 18, byte_len: 9 }) }, + Range { from: '\u{3306}', to: '\u{3306}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 18, byte_len: 9 }) }, + Range { from: '\u{3307}', to: '\u{3307}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 163, byte_start_hi: 18, byte_len: 15 }) }, + Range { from: '\u{3308}', to: '\u{3308}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 18, byte_len: 12 }) }, + Range { from: '\u{3309}', to: '\u{3309}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 18, byte_len: 9 }) }, + Range { from: '\u{330a}', to: '\u{330a}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 18, byte_len: 9 }) }, + Range { from: '\u{330b}', to: '\u{330b}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 18, byte_len: 9 }) }, + Range { from: '\u{330c}', to: '\u{330c}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 18, byte_len: 12 }) }, + Range { from: '\u{330d}', to: '\u{330d}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 18, byte_len: 12 }) }, + Range { from: '\u{330e}', to: '\u{330e}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 18, byte_len: 9 }) }, + Range { from: '\u{330f}', to: '\u{330f}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 18, byte_len: 9 }) }, + Range { from: '\u{3310}', to: '\u{3310}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 19, byte_len: 6 }) }, + Range { from: '\u{3311}', to: '\u{3311}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 19, byte_len: 9 }) }, + Range { from: '\u{3312}', to: '\u{3312}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 19, byte_len: 12 }) }, + Range { from: '\u{3313}', to: '\u{3313}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 19, byte_len: 12 }) }, + Range { from: '\u{3314}', to: '\u{3314}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 19, byte_len: 6 }) }, + Range { from: '\u{3315}', to: '\u{3315}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 19, byte_len: 15 }) }, + Range { from: '\u{3316}', to: '\u{3316}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 19, byte_len: 18 }) }, + Range { from: '\u{3317}', to: '\u{3317}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 19, byte_len: 15 }) }, + Range { from: '\u{3318}', to: '\u{3318}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 19, byte_len: 9 }) }, + Range { from: '\u{3319}', to: '\u{3319}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 19, byte_len: 15 }) }, + Range { from: '\u{331a}', to: '\u{331a}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 19, byte_len: 15 }) }, + Range { from: '\u{331b}', to: '\u{331b}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 19, byte_len: 12 }) }, + Range { from: '\u{331c}', to: '\u{331c}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 19, byte_len: 9 }) }, + Range { from: '\u{331d}', to: '\u{331d}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 19, byte_len: 9 }) }, + Range { from: '\u{331e}', to: '\u{331e}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 19, byte_len: 9 }) }, + Range { from: '\u{331f}', to: '\u{331f}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 19, byte_len: 12 }) }, + Range { from: '\u{3320}', to: '\u{3320}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 19, byte_len: 15 }) }, + Range { from: '\u{3321}', to: '\u{3321}', mapping: Mapped(StringTableSlice { byte_start_lo: 
201, byte_start_hi: 19, byte_len: 12 }) }, + Range { from: '\u{3322}', to: '\u{3322}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 19, byte_len: 9 }) }, + Range { from: '\u{3323}', to: '\u{3323}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 19, byte_len: 9 }) }, + Range { from: '\u{3324}', to: '\u{3324}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 19, byte_len: 9 }) }, + Range { from: '\u{3325}', to: '\u{3325}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 19, byte_len: 6 }) }, + Range { from: '\u{3326}', to: '\u{3326}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 19, byte_len: 6 }) }, + Range { from: '\u{3327}', to: '\u{3327}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 19, byte_len: 6 }) }, + Range { from: '\u{3328}', to: '\u{3328}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 20, byte_len: 6 }) }, + Range { from: '\u{3329}', to: '\u{3329}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 20, byte_len: 9 }) }, + Range { from: '\u{332a}', to: '\u{332a}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 20, byte_len: 9 }) }, + Range { from: '\u{332b}', to: '\u{332b}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 20, byte_len: 15 }) }, + Range { from: '\u{332c}', to: '\u{332c}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 20, byte_len: 9 }) }, + Range { from: '\u{332d}', to: '\u{332d}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 20, byte_len: 12 }) }, + Range { from: '\u{332e}', to: '\u{332e}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 20, byte_len: 15 }) }, + Range { from: '\u{332f}', to: '\u{332f}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 20, byte_len: 9 }) }, + Range { from: '\u{3330}', to: '\u{3330}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 20, byte_len: 6 }) }, + Range { from: '\u{3331}', to: '\u{3331}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 20, byte_len: 6 }) }, + Range { from: '\u{3332}', to: '\u{3332}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 20, byte_len: 15 }) }, + Range { from: '\u{3333}', to: '\u{3333}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 20, byte_len: 12 }) }, + Range { from: '\u{3334}', to: '\u{3334}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 20, byte_len: 15 }) }, + Range { from: '\u{3335}', to: '\u{3335}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 20, byte_len: 9 }) }, + Range { from: '\u{3336}', to: '\u{3336}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 20, byte_len: 15 }) }, + Range { from: '\u{3337}', to: '\u{3337}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 20, byte_len: 6 }) }, + Range { from: '\u{3338}', to: '\u{3338}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 20, byte_len: 9 }) }, + Range { from: '\u{3339}', to: '\u{3339}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 20, byte_len: 9 }) }, + Range { from: '\u{333a}', to: '\u{333a}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 20, byte_len: 9 }) }, + Range { from: '\u{333b}', to: '\u{333b}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 20, 
byte_len: 9 }) },
+ Range { from: '\u{333c}', to: '\u{333c}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 20, byte_len: 9 }) },
+ Range { from: '\u{333d}', to: '\u{333d}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 20, byte_len: 12 }) },
+ Range { from: '\u{333e}', to: '\u{333e}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 20, byte_len: 9 }) },
+ Range { from: '\u{333f}', to: '\u{333f}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 20, byte_len: 6 }) },
+ Range { from: '\u{3340}', to: '\u{3340}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 20, byte_len: 9 }) },
+ Range { from: '\u{3341}', to: '\u{3341}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 20, byte_len: 9 }) },
+ Range { from: '\u{3342}', to: '\u{3342}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{3343}', to: '\u{3343}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 21, byte_len: 12 }) },
+ Range { from: '\u{3344}', to: '\u{3344}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{3345}', to: '\u{3345}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{3346}', to: '\u{3346}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{3347}', to: '\u{3347}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 21, byte_len: 15 }) },
+ Range { from: '\u{3348}', to: '\u{3348}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 21, byte_len: 12 }) },
+ Range { from: '\u{3349}', to: '\u{3349}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 21, byte_len: 6 }) },
+ Range { from: '\u{334a}', to: '\u{334a}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 21, byte_len: 15 }) },
+ Range { from: '\u{334b}', to: '\u{334b}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 21, byte_len: 6 }) },
+ Range { from: '\u{334c}', to: '\u{334c}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 21, byte_len: 12 }) },
+ Range { from: '\u{334d}', to: '\u{334d}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 21, byte_len: 12 }) },
+ Range { from: '\u{334e}', to: '\u{334e}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{334f}', to: '\u{334f}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{3350}', to: '\u{3350}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{3351}', to: '\u{3351}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 21, byte_len: 12 }) },
+ Range { from: '\u{3352}', to: '\u{3352}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 21, byte_len: 6 }) },
+ Range { from: '\u{3353}', to: '\u{3353}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{3354}', to: '\u{3354}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 21, byte_len: 12 }) },
+ Range { from: '\u{3355}', to: '\u{3355}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 21, byte_len: 6 }) },
+ Range { from: '\u{3356}', to: '\u{3356}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 21, byte_len: 15 }) },
+ Range { from: '\u{3357}', to: '\u{3357}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 21, byte_len: 9 }) },
+ Range { from: '\u{3358}', to: '\u{3358}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 21, byte_len: 4 }) },
+ Range { from: '\u{3359}', to: '\u{3359}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 21, byte_len: 4 }) },
+ Range { from: '\u{335a}', to: '\u{335a}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 21, byte_len: 4 }) },
+ Range { from: '\u{335b}', to: '\u{335b}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 21, byte_len: 4 }) },
+ Range { from: '\u{335c}', to: '\u{335c}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 21, byte_len: 4 }) },
+ Range { from: '\u{335d}', to: '\u{335d}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 21, byte_len: 4 }) },
+ Range { from: '\u{335e}', to: '\u{335e}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 21, byte_len: 4 }) },
+ Range { from: '\u{335f}', to: '\u{335f}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 21, byte_len: 4 }) },
+ Range { from: '\u{3360}', to: '\u{3360}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 22, byte_len: 4 }) },
+ Range { from: '\u{3361}', to: '\u{3361}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 22, byte_len: 4 }) },
+ Range { from: '\u{3362}', to: '\u{3362}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3363}', to: '\u{3363}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3364}', to: '\u{3364}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3365}', to: '\u{3365}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3366}', to: '\u{3366}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3367}', to: '\u{3367}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3368}', to: '\u{3368}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3369}', to: '\u{3369}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{336a}', to: '\u{336a}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{336b}', to: '\u{336b}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{336c}', to: '\u{336c}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{336d}', to: '\u{336d}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{336e}', to: '\u{336e}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{336f}', to: '\u{336f}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3370}', to: '\u{3370}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{3371}', to: '\u{3371}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3372}', to: '\u{3372}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3373}', to: '\u{3373}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3374}', to: '\u{3374}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3375}', to: '\u{3375}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3376}', to: '\u{3376}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3377}', to: '\u{3377}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3378}', to: '\u{3378}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3379}', to: '\u{3379}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{337a}', to: '\u{337a}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{337b}', to: '\u{337b}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 22, byte_len: 6 }) },
+ Range { from: '\u{337c}', to: '\u{337c}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 22, byte_len: 6 }) },
+ Range { from: '\u{337d}', to: '\u{337d}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 22, byte_len: 6 }) },
+ Range { from: '\u{337e}', to: '\u{337e}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 22, byte_len: 6 }) },
+ Range { from: '\u{337f}', to: '\u{337f}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 22, byte_len: 12 }) },
+ Range { from: '\u{3380}', to: '\u{3380}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3381}', to: '\u{3381}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3382}', to: '\u{3382}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3383}', to: '\u{3383}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3384}', to: '\u{3384}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3385}', to: '\u{3385}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3386}', to: '\u{3386}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3387}', to: '\u{3387}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3388}', to: '\u{3388}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3389}', to: '\u{3389}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 22, byte_len: 4 }) },
+ Range { from: '\u{338a}', to: '\u{338a}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{338b}', to: '\u{338b}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{338c}', to: '\u{338c}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{338d}', to: '\u{338d}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{338e}', to: '\u{338e}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{338f}', to: '\u{338f}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3390}', to: '\u{3390}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3391}', to: '\u{3391}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3392}', to: '\u{3392}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3393}', to: '\u{3393}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3394}', to: '\u{3394}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3395}', to: '\u{3395}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{3396}', to: '\u{3396}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3397}', to: '\u{3397}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3398}', to: '\u{3398}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{3399}', to: '\u{3399}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{339a}', to: '\u{339a}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{339b}', to: '\u{339b}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{339c}', to: '\u{339c}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{339d}', to: '\u{339d}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{339e}', to: '\u{339e}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{339f}', to: '\u{339f}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{33a0}', to: '\u{33a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{33a1}', to: '\u{33a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{33a2}', to: '\u{33a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{33a3}', to: '\u{33a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{33a4}', to: '\u{33a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{33a5}', to: '\u{33a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{33a6}', to: '\u{33a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{33a7}', to: '\u{33a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 22, byte_len: 5 }) },
+ Range { from: '\u{33a8}', to: '\u{33a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 22, byte_len: 6 }) },
+ Range { from: '\u{33a9}', to: '\u{33a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{33aa}', to: '\u{33aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{33ab}', to: '\u{33ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 22, byte_len: 3 }) },
+ Range { from: '\u{33ac}', to: '\u{33ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33ad}', to: '\u{33ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33ae}', to: '\u{33ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 23, byte_len: 7 }) },
+ Range { from: '\u{33af}', to: '\u{33af}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 23, byte_len: 8 }) },
+ Range { from: '\u{33b0}', to: '\u{33b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33b1}', to: '\u{33b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33b2}', to: '\u{33b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33b3}', to: '\u{33b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33b4}', to: '\u{33b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33b5}', to: '\u{33b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33b6}', to: '\u{33b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33b7}', to: '\u{33b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33b8}', to: '\u{33b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33b9}', to: '\u{33b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33ba}', to: '\u{33ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33bb}', to: '\u{33bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33bc}', to: '\u{33bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33bd}', to: '\u{33bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33be}', to: '\u{33be}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33bf}', to: '\u{33bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33c0}', to: '\u{33c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33c1}', to: '\u{33c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33c2}', to: '\u{33c2}', mapping: Disallowed },
+ Range { from: '\u{33c3}', to: '\u{33c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33c4}', to: '\u{33c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33c5}', to: '\u{33c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33c6}', to: '\u{33c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 23, byte_len: 6 }) },
+ Range { from: '\u{33c7}', to: '\u{33c7}', mapping: Disallowed },
+ Range { from: '\u{33c8}', to: '\u{33c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33c9}', to: '\u{33c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33ca}', to: '\u{33ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33cb}', to: '\u{33cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33cc}', to: '\u{33cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33cd}', to: '\u{33cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33ce}', to: '\u{33ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{33cf}', to: '\u{33cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33d0}', to: '\u{33d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33d1}', to: '\u{33d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33d2}', to: '\u{33d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33d3}', to: '\u{33d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33d4}', to: '\u{33d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 22, byte_len: 2 }) },
+ Range { from: '\u{33d5}', to: '\u{33d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33d6}', to: '\u{33d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33d7}', to: '\u{33d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33d8}', to: '\u{33d8}', mapping: Disallowed },
+ Range { from: '\u{33d9}', to: '\u{33d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 23, byte_len: 3 }) },
+ Range { from: '\u{33da}', to: '\u{33da}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33db}', to: '\u{33db}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33dc}', to: '\u{33dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33dd}', to: '\u{33dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 23, byte_len: 2 }) },
+ Range { from: '\u{33de}', to: '\u{33de}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33df}', to: '\u{33df}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33e0}', to: '\u{33e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e1}', to: '\u{33e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e2}', to: '\u{33e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e3}', to: '\u{33e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e4}', to: '\u{33e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e5}', to: '\u{33e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e6}', to: '\u{33e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e7}', to: '\u{33e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e8}', to: '\u{33e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 23, byte_len: 4 }) },
+ Range { from: '\u{33e9}', to: '\u{33e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33ea}', to: '\u{33ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33eb}', to: '\u{33eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33ec}', to: '\u{33ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33ed}', to: '\u{33ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33ee}', to: '\u{33ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33ef}', to: '\u{33ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f0}', to: '\u{33f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f1}', to: '\u{33f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f2}', to: '\u{33f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f3}', to: '\u{33f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f4}', to: '\u{33f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f5}', to: '\u{33f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f6}', to: '\u{33f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f7}', to: '\u{33f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f8}', to: '\u{33f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33f9}', to: '\u{33f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33fa}', to: '\u{33fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33fb}', to: '\u{33fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33fc}', to: '\u{33fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 23, byte_len: 5 }) },
+ Range { from: '\u{33fd}', to: '\u{33fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 24, byte_len: 5 }) },
+ Range { from: '\u{33fe}', to: '\u{33fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 24, byte_len: 5 }) },
+ Range { from: '\u{33ff}', to: '\u{33ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{3400}', to: '\u{4db5}', mapping: Valid },
+ Range { from: '\u{4db6}', to: '\u{4dbf}', mapping: Disallowed },
+ Range { from: '\u{4dc0}', to: '\u{9fd5}', mapping: Valid },
+ Range { from: '\u{9fd6}', to: '\u{9fff}', mapping: Disallowed },
+ Range { from: '\u{a000}', to: '\u{a48c}', mapping: Valid },
+ Range { from: '\u{a48d}', to: '\u{a48f}', mapping: Disallowed },
+ Range { from: '\u{a490}', to: '\u{a4c6}', mapping: Valid },
+ Range { from: '\u{a4c7}', to: '\u{a4cf}', mapping: Disallowed },
+ Range { from: '\u{a4d0}', to: '\u{a62b}', mapping: Valid },
+ Range { from: '\u{a62c}', to: '\u{a63f}', mapping: Disallowed },
+ Range { from: '\u{a640}', to: '\u{a640}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a641}', to: '\u{a641}', mapping: Valid },
+ Range { from: '\u{a642}', to: '\u{a642}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a643}', to: '\u{a643}', mapping: Valid },
+ Range { from: '\u{a644}', to: '\u{a644}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a645}', to: '\u{a645}', mapping: Valid },
+ Range { from: '\u{a646}', to: '\u{a646}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a647}', to: '\u{a647}', mapping: Valid },
+ Range { from: '\u{a648}', to: '\u{a648}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a649}', to: '\u{a649}', mapping: Valid },
+ Range { from: '\u{a64a}', to: '\u{a64a}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 5, byte_len: 3 }) },
+ Range { from: '\u{a64b}', to: '\u{a64b}', mapping: Valid },
+ Range { from: '\u{a64c}', to: '\u{a64c}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a64d}', to: '\u{a64d}', mapping: Valid },
+ Range { from: '\u{a64e}', to: '\u{a64e}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a64f}', to: '\u{a64f}', mapping: Valid },
+ Range { from: '\u{a650}', to: '\u{a650}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a651}', to: '\u{a651}', mapping: Valid },
+ Range { from: '\u{a652}', to: '\u{a652}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a653}', to: '\u{a653}', mapping: Valid },
+ Range { from: '\u{a654}', to: '\u{a654}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a655}', to: '\u{a655}', mapping: Valid },
+ Range { from: '\u{a656}', to: '\u{a656}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a657}', to: '\u{a657}', mapping: Valid },
+ Range { from: '\u{a658}', to: '\u{a658}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a659}', to: '\u{a659}', mapping: Valid },
+ Range { from: '\u{a65a}', to: '\u{a65a}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a65b}', to: '\u{a65b}', mapping: Valid },
+ Range { from: '\u{a65c}', to: '\u{a65c}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a65d}', to: '\u{a65d}', mapping: Valid },
+ Range { from: '\u{a65e}', to: '\u{a65e}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a65f}', to: '\u{a65f}', mapping: Valid },
+ Range { from: '\u{a660}', to: '\u{a660}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a661}', to: '\u{a661}', mapping: Valid },
+ Range { from: '\u{a662}', to: '\u{a662}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a663}', to: '\u{a663}', mapping: Valid },
+ Range { from: '\u{a664}', to: '\u{a664}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a665}', to: '\u{a665}', mapping: Valid },
+ Range { from: '\u{a666}', to: '\u{a666}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a667}', to: '\u{a667}', mapping: Valid },
+ Range { from: '\u{a668}', to: '\u{a668}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a669}', to: '\u{a669}', mapping: Valid },
+ Range { from: '\u{a66a}', to: '\u{a66a}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a66b}', to: '\u{a66b}', mapping: Valid },
+ Range { from: '\u{a66c}', to: '\u{a66c}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a66d}', to: '\u{a67f}', mapping: Valid },
+ Range { from: '\u{a680}', to: '\u{a680}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a681}', to: '\u{a681}', mapping: Valid },
+ Range { from: '\u{a682}', to: '\u{a682}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a683}', to: '\u{a683}', mapping: Valid },
+ Range { from: '\u{a684}', to: '\u{a684}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a685}', to: '\u{a685}', mapping: Valid },
+ Range { from: '\u{a686}', to: '\u{a686}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a687}', to: '\u{a687}', mapping: Valid },
+ Range { from: '\u{a688}', to: '\u{a688}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a689}', to: '\u{a689}', mapping: Valid },
+ Range { from: '\u{a68a}', to: '\u{a68a}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a68b}', to: '\u{a68b}', mapping: Valid },
+ Range { from: '\u{a68c}', to: '\u{a68c}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a68d}', to: '\u{a68d}', mapping: Valid },
+ Range { from: '\u{a68e}', to: '\u{a68e}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a68f}', to: '\u{a68f}', mapping: Valid },
+ Range { from: '\u{a690}', to: '\u{a690}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a691}', to: '\u{a691}', mapping: Valid },
+ Range { from: '\u{a692}', to: '\u{a692}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a693}', to: '\u{a693}', mapping: Valid },
+ Range { from: '\u{a694}', to: '\u{a694}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a695}', to: '\u{a695}', mapping: Valid },
+ Range { from: '\u{a696}', to: '\u{a696}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a697}', to: '\u{a697}', mapping: Valid },
+ Range { from: '\u{a698}', to: '\u{a698}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a699}', to: '\u{a699}', mapping: Valid },
+ Range { from: '\u{a69a}', to: '\u{a69a}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a69b}', to: '\u{a69b}', mapping: Valid },
+ Range { from: '\u{a69c}', to: '\u{a69c}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{a69d}', to: '\u{a69d}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 2, byte_len: 2 }) },
+ Range { from: '\u{a69e}', to: '\u{a6f7}', mapping: Valid },
+ Range { from: '\u{a6f8}', to: '\u{a6ff}', mapping: Disallowed },
+ Range { from: '\u{a700}', to: '\u{a721}', mapping: Valid },
+ Range { from: '\u{a722}', to: '\u{a722}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a723}', to: '\u{a723}', mapping: Valid },
+ Range { from: '\u{a724}', to: '\u{a724}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a725}', to: '\u{a725}', mapping: Valid },
+ Range { from: '\u{a726}', to: '\u{a726}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a727}', to: '\u{a727}', mapping: Valid },
+ Range { from: '\u{a728}', to: '\u{a728}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a729}', to: '\u{a729}', mapping: Valid },
+ Range { from: '\u{a72a}', to: '\u{a72a}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a72b}', to: '\u{a72b}', mapping: Valid },
+ Range { from: '\u{a72c}', to: '\u{a72c}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a72d}', to: '\u{a72d}', mapping: Valid },
+ Range { from: '\u{a72e}', to: '\u{a72e}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a72f}', to: '\u{a731}', mapping: Valid },
+ Range { from: '\u{a732}', to: '\u{a732}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a733}', to: '\u{a733}', mapping: Valid },
+ Range { from: '\u{a734}', to: '\u{a734}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a735}', to: '\u{a735}', mapping: Valid },
+ Range { from: '\u{a736}', to: '\u{a736}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a737}', to: '\u{a737}', mapping: Valid },
+ Range { from: '\u{a738}', to: '\u{a738}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a739}', to: '\u{a739}', mapping: Valid },
+ Range { from: '\u{a73a}', to: '\u{a73a}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a73b}', to: '\u{a73b}', mapping: Valid },
+ Range { from: '\u{a73c}', to: '\u{a73c}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a73d}', to: '\u{a73d}', mapping: Valid },
+ Range { from: '\u{a73e}', to: '\u{a73e}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a73f}', to: '\u{a73f}', mapping: Valid },
+ Range { from: '\u{a740}', to: '\u{a740}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a741}', to: '\u{a741}', mapping: Valid },
+ Range { from: '\u{a742}', to: '\u{a742}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a743}', to: '\u{a743}', mapping: Valid },
+ Range { from: '\u{a744}', to: '\u{a744}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a745}', to: '\u{a745}', mapping: Valid },
+ Range { from: '\u{a746}', to: '\u{a746}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a747}', to: '\u{a747}', mapping: Valid },
+ Range { from: '\u{a748}', to: '\u{a748}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a749}', to: '\u{a749}', mapping: Valid },
+ Range { from: '\u{a74a}', to: '\u{a74a}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a74b}', to: '\u{a74b}', mapping: Valid },
+ Range { from: '\u{a74c}', to: '\u{a74c}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a74d}', to: '\u{a74d}', mapping: Valid },
+ Range { from: '\u{a74e}', to: '\u{a74e}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a74f}', to: '\u{a74f}', mapping: Valid },
+ Range { from: '\u{a750}', to: '\u{a750}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a751}', to: '\u{a751}', mapping: Valid },
+ Range { from: '\u{a752}', to: '\u{a752}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a753}', to: '\u{a753}', mapping: Valid },
+ Range { from: '\u{a754}', to: '\u{a754}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a755}', to: '\u{a755}', mapping: Valid },
+ Range { from: '\u{a756}', to: '\u{a756}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a757}', to: '\u{a757}', mapping: Valid },
+ Range { from: '\u{a758}', to: '\u{a758}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a759}', to: '\u{a759}', mapping: Valid },
+ Range { from: '\u{a75a}', to: '\u{a75a}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a75b}', to: '\u{a75b}', mapping: Valid },
+ Range { from: '\u{a75c}', to: '\u{a75c}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a75d}', to: '\u{a75d}', mapping: Valid },
+ Range { from: '\u{a75e}', to: '\u{a75e}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a75f}', to: '\u{a75f}', mapping: Valid },
+ Range { from: '\u{a760}', to: '\u{a760}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a761}', to: '\u{a761}', mapping: Valid },
+ Range { from: '\u{a762}', to: '\u{a762}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a763}', to: '\u{a763}', mapping: Valid },
+ Range { from: '\u{a764}', to: '\u{a764}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a765}', to: '\u{a765}', mapping: Valid },
+ Range { from: '\u{a766}', to: '\u{a766}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a767}', to: '\u{a767}', mapping: Valid },
+ Range { from: '\u{a768}', to: '\u{a768}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a769}', to: '\u{a769}', mapping: Valid },
+ Range { from: '\u{a76a}', to: '\u{a76a}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a76b}', to: '\u{a76b}', mapping: Valid },
+ Range { from: '\u{a76c}', to: '\u{a76c}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a76d}', to: '\u{a76d}', mapping: Valid },
+ Range { from: '\u{a76e}', to: '\u{a76e}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a76f}', to: '\u{a76f}', mapping: Valid },
+ Range { from: '\u{a770}', to: '\u{a770}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a771}', to: '\u{a778}', mapping: Valid },
+ Range { from: '\u{a779}', to: '\u{a779}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a77a}', to: '\u{a77a}', mapping: Valid },
+ Range { from: '\u{a77b}', to: '\u{a77b}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a77c}', to: '\u{a77c}', mapping: Valid },
+ Range { from: '\u{a77d}', to: '\u{a77d}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a77e}', to: '\u{a77e}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a77f}', to: '\u{a77f}', mapping: Valid },
+ Range { from: '\u{a780}', to: '\u{a780}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a781}', to: '\u{a781}', mapping: Valid },
+ Range { from: '\u{a782}', to: '\u{a782}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{a783}', to: '\u{a783}', mapping: Valid },
+ Range { from: '\u{a784}', to: '\u{a784}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a785}', to: '\u{a785}', mapping: Valid },
+ Range { from: '\u{a786}', to: '\u{a786}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a787}', to: '\u{a78a}', mapping: Valid },
+ Range { from: '\u{a78b}', to: '\u{a78b}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a78c}', to: '\u{a78c}', mapping: Valid },
+ Range { from: '\u{a78d}', to: '\u{a78d}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 5, byte_len: 2 }) },
+ Range { from: '\u{a78e}', to: '\u{a78f}', mapping: Valid },
+ Range { from: '\u{a790}', to: '\u{a790}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a791}', to: '\u{a791}', mapping: Valid },
+ Range { from: '\u{a792}', to: '\u{a792}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a793}', to: '\u{a795}', mapping: Valid },
+ Range { from: '\u{a796}', to: '\u{a796}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a797}', to: '\u{a797}', mapping: Valid },
+ Range { from: '\u{a798}', to: '\u{a798}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a799}', to: '\u{a799}', mapping: Valid },
+ Range { from: '\u{a79a}', to: '\u{a79a}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a79b}', to: '\u{a79b}', mapping: Valid },
+ Range { from: '\u{a79c}', to: '\u{a79c}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a79d}', to: '\u{a79d}', mapping: Valid },
+ Range { from: '\u{a79e}', to: '\u{a79e}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a79f}', to: '\u{a79f}', mapping: Valid },
+ Range { from: '\u{a7a0}', to: '\u{a7a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a7a1}', to: '\u{a7a1}', mapping: Valid },
+ Range { from: '\u{a7a2}', to: '\u{a7a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a7a3}', to: '\u{a7a3}', mapping: Valid },
+ Range { from: '\u{a7a4}', to: '\u{a7a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a7a5}', to: '\u{a7a5}', mapping: Valid },
+ Range { from: '\u{a7a6}', to: '\u{a7a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a7a7}', to: '\u{a7a7}', mapping: Valid },
+ Range { from: '\u{a7a8}', to: '\u{a7a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a7a9}', to: '\u{a7a9}', mapping: Valid },
+ Range { from: '\u{a7aa}', to: '\u{a7aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 1, byte_len: 2 }) },
+ Range { from: '\u{a7ab}', to: '\u{a7ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }) },
+ Range { from: '\u{a7ac}', to: '\u{a7ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 5, byte_len: 2 }) },
+ Range { from: '\u{a7ad}', to: '\u{a7ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 25, byte_len: 2 }) },
+ Range { from: '\u{a7ae}', to: '\u{a7ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 5, byte_len: 2 }) },
+ Range { from: '\u{a7af}', to: '\u{a7af}', mapping: Disallowed },
+ Range { from: '\u{a7b0}', to: '\u{a7b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 25, byte_len: 2 }) },
+ Range { from: '\u{a7b1}', to: '\u{a7b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 25, byte_len: 2 }) },
+ Range { from: '\u{a7b2}', to: '\u{a7b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 5, byte_len: 2 }) },
+ Range { from: '\u{a7b3}', to: '\u{a7b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a7b4}', to: '\u{a7b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a7b5}', to: '\u{a7b5}', mapping: Valid },
+ Range { from: '\u{a7b6}', to: '\u{a7b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{a7b7}', to: '\u{a7b7}', mapping: Valid },
+ Range { from: '\u{a7b8}', to: '\u{a7f6}', mapping: Disallowed },
+ Range { from: '\u{a7f7}', to: '\u{a7f7}', mapping: Valid },
+ Range { from: '\u{a7f8}', to: '\u{a7f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }) },
+ Range { from: '\u{a7f9}', to: '\u{a7f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 0, byte_len: 2 }) },
+ Range { from: '\u{a7fa}', to: '\u{a82b}', mapping: Valid },
+ Range { from: '\u{a82c}', to: '\u{a82f}', mapping: Disallowed },
+ Range { from: '\u{a830}', to: '\u{a839}', mapping: Valid },
+ Range { from: '\u{a83a}', to: '\u{a83f}', mapping: Disallowed },
+ Range { from: '\u{a840}', to: '\u{a877}', mapping: Valid },
+ Range { from: '\u{a878}', to: '\u{a87f}', mapping: Disallowed },
+ Range { from: '\u{a880}', to: '\u{a8c5}', mapping: Valid },
+ Range { from: '\u{a8c6}', to: '\u{a8cd}', mapping: Disallowed },
+ Range { from: '\u{a8ce}', to: '\u{a8d9}', mapping: Valid },
+ Range { from: '\u{a8da}', to: '\u{a8df}', mapping: Disallowed },
+ Range { from: '\u{a8e0}', to: '\u{a8fd}', mapping: Valid },
+ Range { from: '\u{a8fe}', to: '\u{a8ff}', mapping: Disallowed },
+ Range { from: '\u{a900}', to: '\u{a953}', mapping: Valid },
+ Range { from: '\u{a954}', to: '\u{a95e}', mapping: Disallowed },
+ Range { from: '\u{a95f}', to: '\u{a97c}', mapping: Valid },
+ Range { from: '\u{a97d}', to: '\u{a97f}', mapping: Disallowed },
+ Range { from: '\u{a980}', to: '\u{a9cd}', mapping: Valid },
+ Range { from: '\u{a9ce}', to: '\u{a9ce}', mapping: Disallowed },
+ Range { from: '\u{a9cf}', to: '\u{a9d9}', mapping: Valid },
+ Range { from: '\u{a9da}', to: '\u{a9dd}', mapping: Disallowed },
+ Range { from: '\u{a9de}', to: '\u{a9fe}', mapping: Valid },
+ Range { from: '\u{a9ff}', to: '\u{a9ff}', mapping: Disallowed },
+ Range { from: '\u{aa00}', to: '\u{aa36}', mapping: Valid },
+ Range { from: '\u{aa37}', to: '\u{aa3f}', mapping: Disallowed },
+ Range { from: '\u{aa40}', to: '\u{aa4d}', mapping: Valid },
+ Range { from: '\u{aa4e}', to: '\u{aa4f}', mapping: Disallowed },
+ Range { from: '\u{aa50}', to: '\u{aa59}', mapping: Valid },
+ Range { from: '\u{aa5a}', to: '\u{aa5b}', mapping: Disallowed },
+ Range { from: '\u{aa5c}', to: '\u{aac2}', mapping: Valid },
+ Range { from: '\u{aac3}', to: '\u{aada}', mapping: Disallowed },
+ Range { from: '\u{aadb}', to: '\u{aaf6}', mapping: Valid },
+ Range { from: '\u{aaf7}', to: '\u{ab00}', mapping: Disallowed },
+ Range { from: '\u{ab01}', to: '\u{ab06}', mapping: Valid },
+ Range { from: '\u{ab07}', to: '\u{ab08}', mapping: Disallowed },
+ Range { from: '\u{ab09}', to: '\u{ab0e}', mapping: Valid },
+ Range { from: '\u{ab0f}', to: '\u{ab10}', mapping: Disallowed },
+ Range { from: '\u{ab11}', to: '\u{ab16}', mapping: Valid },
+ Range { from: '\u{ab17}', to: '\u{ab1f}', mapping: Disallowed },
+ Range { from: '\u{ab20}', to: '\u{ab26}', mapping: Valid },
+ Range { from: '\u{ab27}', to: '\u{ab27}', mapping: Disallowed },
+ Range { from: '\u{ab28}', to: '\u{ab2e}', mapping: Valid },
+ Range { from: '\u{ab2f}', to: '\u{ab2f}', mapping: Disallowed },
+ Range { from: '\u{ab30}', to: '\u{ab5b}', mapping: Valid },
+ Range { from: '\u{ab5c}', to: '\u{ab5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 24, byte_len: 3 }) },
+ Range { from: '\u{ab5d}', to: '\u{ab5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab5e}', to: '\u{ab5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 10, byte_len: 2 }) },
+ Range { from: '\u{ab5f}', to: '\u{ab5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab60}', to: '\u{ab65}', mapping: Valid },
+ Range { from: '\u{ab66}', to: '\u{ab6f}', mapping: Disallowed },
+ Range { from: '\u{ab70}', to: '\u{ab70}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab71}', to: '\u{ab71}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab72}', to: '\u{ab72}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab73}', to: '\u{ab73}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab74}', to: '\u{ab74}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab75}', to: '\u{ab75}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab76}', to: '\u{ab76}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab77}', to: '\u{ab77}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab78}', to: '\u{ab78}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab79}', to: '\u{ab79}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab7a}', to: '\u{ab7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab7b}', to: '\u{ab7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab7c}', to: '\u{ab7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab7d}', to: '\u{ab7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab7e}', to: '\u{ab7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab7f}', to: '\u{ab7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab80}', to: '\u{ab80}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab81}', to: '\u{ab81}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab82}', to: '\u{ab82}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab83}', to: '\u{ab83}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab84}', to: '\u{ab84}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab85}', to: '\u{ab85}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab86}', to: '\u{ab86}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab87}', to: '\u{ab87}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab88}', to: '\u{ab88}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab89}', to: '\u{ab89}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab8a}', to: '\u{ab8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab8b}', to: '\u{ab8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab8c}', to: '\u{ab8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab8d}', to: '\u{ab8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab8e}', to: '\u{ab8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab8f}', to: '\u{ab8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab90}', to: '\u{ab90}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab91}', to: '\u{ab91}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab92}', to: '\u{ab92}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab93}', to: '\u{ab93}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab94}', to: '\u{ab94}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab95}', to: '\u{ab95}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab96}', to: '\u{ab96}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab97}', to: '\u{ab97}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab98}', to: '\u{ab98}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab99}', to: '\u{ab99}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab9a}', to: '\u{ab9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab9b}', to: '\u{ab9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab9c}', to: '\u{ab9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab9d}', to: '\u{ab9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab9e}', to: '\u{ab9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{ab9f}', to: '\u{ab9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba0}', to: '\u{aba0}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba1}', to: '\u{aba1}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba2}', to: '\u{aba2}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba3}', to: '\u{aba3}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba4}', to: '\u{aba4}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba5}', to: '\u{aba5}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba6}', to: '\u{aba6}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba7}', to: '\u{aba7}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba8}', to: '\u{aba8}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{aba9}', to: '\u{aba9}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{abaa}', to: '\u{abaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{abab}', to: '\u{abab}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{abac}', to: '\u{abac}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{abad}', to: '\u{abad}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{abae}', to: '\u{abae}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 25, byte_len: 3 }) },
+ Range { from: '\u{abaf}', to: '\u{abaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb0}', to: '\u{abb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb1}', to: '\u{abb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb2}', to: '\u{abb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb3}', to: '\u{abb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb4}', to: '\u{abb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb5}', to: '\u{abb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb6}', to: '\u{abb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb7}', to: '\u{abb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb8}', to: '\u{abb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abb9}', to: '\u{abb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abba}', to: '\u{abba}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abbb}', to: '\u{abbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abbc}', to: '\u{abbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abbd}', to: '\u{abbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abbe}', to: '\u{abbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abbf}', to: '\u{abbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{abc0}', to: '\u{abed}', mapping: Valid },
+ Range { from: '\u{abee}', to: '\u{abef}', mapping: Disallowed },
+ Range { from: '\u{abf0}', to: '\u{abf9}', mapping: Valid },
+ Range { from: '\u{abfa}', to: '\u{abff}', mapping: Disallowed },
+ Range { from: '\u{ac00}', to: '\u{d7a3}', mapping: Valid },
+ Range { from: '\u{d7a4}', to: '\u{d7af}', mapping: Disallowed },
+ Range { from: '\u{d7b0}', to: '\u{d7c6}', mapping: Valid },
+ Range { from: '\u{d7c7}', to: '\u{d7ca}', mapping: Disallowed },
+ Range { from: '\u{d7cb}', to: '\u{d7fb}', mapping: Valid },
+ Range { from: '\u{d7fc}', to: '\u{f8ff}', mapping: Disallowed },
+ Range { from: '\u{f900}', to: '\u{f900}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f901}', to: '\u{f901}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f902}', to: '\u{f902}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 13, byte_len: 3 }) },
+ Range { from: '\u{f903}', to: '\u{f903}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f904}', to: '\u{f904}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f905}', to: '\u{f905}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f906}', to: '\u{f906}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f907}', to: '\u{f908}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }) },
+ Range { from: '\u{f909}', to: '\u{f909}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f90a}', to: '\u{f90a}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }) },
+ Range { from: '\u{f90b}', to: '\u{f90b}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f90c}', to: '\u{f90c}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f90d}', to: '\u{f90d}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f90e}', to: '\u{f90e}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f90f}', to: '\u{f90f}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f910}', to: '\u{f910}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f911}', to: '\u{f911}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f912}', to: '\u{f912}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f913}', to: '\u{f913}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f914}', to: '\u{f914}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f915}', to: '\u{f915}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f916}', to: '\u{f916}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f917}', to: '\u{f917}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f918}', to: '\u{f918}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f919}', to: '\u{f919}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f91a}', to: '\u{f91a}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f91b}', to: '\u{f91b}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f91c}', to: '\u{f91c}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f91d}', to: '\u{f91d}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f91e}', to: '\u{f91e}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f91f}', to: '\u{f91f}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f920}', to: '\u{f920}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f921}', to: '\u{f921}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f922}', to: '\u{f922}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f923}', to: '\u{f923}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f924}', to: '\u{f924}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f925}', to: '\u{f925}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f926}', to: '\u{f926}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f927}', to: '\u{f927}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f928}', to: '\u{f928}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f929}', to: '\u{f929}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f92a}', to: '\u{f92a}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f92b}', to: '\u{f92b}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f92c}', to: '\u{f92c}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f92d}', to: '\u{f92d}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f92e}', to: '\u{f92e}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f92f}', to: '\u{f92f}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f930}', to: '\u{f930}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f931}', to: '\u{f931}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f932}', to: '\u{f932}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f933}', to: '\u{f933}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 26, byte_len: 3 }) },
+ Range { from: '\u{f934}', to: '\u{f934}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 12, byte_len: 3 }) },
+ Range { from: '\u{f935}', to: '\u{f935}', mapping: Mapped(StringTableSlice { byte_start_lo:
195, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f936}', to: '\u{f936}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f937}', to: '\u{f937}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f938}', to: '\u{f938}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f939}', to: '\u{f939}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f93a}', to: '\u{f93a}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f93b}', to: '\u{f93b}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f93c}', to: '\u{f93c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f93d}', to: '\u{f93d}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f93e}', to: '\u{f93e}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f93f}', to: '\u{f93f}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f940}', to: '\u{f940}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{f941}', to: '\u{f941}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f942}', to: '\u{f942}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f943}', to: '\u{f943}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f944}', to: '\u{f944}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f945}', to: '\u{f945}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f946}', to: '\u{f946}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f947}', to: '\u{f947}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f948}', to: '\u{f948}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f949}', to: '\u{f949}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f94a}', to: '\u{f94a}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f94b}', to: '\u{f94b}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f94c}', to: '\u{f94c}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f94d}', to: '\u{f94d}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f94e}', to: '\u{f94e}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f94f}', to: '\u{f94f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 27, 
byte_len: 3 }) }, + Range { from: '\u{f950}', to: '\u{f950}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f951}', to: '\u{f951}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f952}', to: '\u{f952}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f953}', to: '\u{f953}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f954}', to: '\u{f954}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f955}', to: '\u{f955}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f956}', to: '\u{f956}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f957}', to: '\u{f957}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f958}', to: '\u{f958}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f959}', to: '\u{f959}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f95a}', to: '\u{f95a}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f95b}', to: '\u{f95b}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f95c}', to: '\u{f95c}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f95d}', to: '\u{f95d}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f95e}', to: '\u{f95e}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f95f}', to: '\u{f95f}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f960}', to: '\u{f960}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f961}', to: '\u{f961}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f962}', to: '\u{f962}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f963}', to: '\u{f963}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f964}', to: '\u{f964}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f965}', to: '\u{f965}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f966}', to: '\u{f966}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f967}', to: '\u{f967}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f968}', to: '\u{f968}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f969}', to: '\u{f969}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: 
'\u{f96a}', to: '\u{f96a}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f96b}', to: '\u{f96b}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f96c}', to: '\u{f96c}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f96d}', to: '\u{f96d}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f96e}', to: '\u{f96e}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f96f}', to: '\u{f96f}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f970}', to: '\u{f970}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f971}', to: '\u{f971}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{f972}', to: '\u{f972}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f973}', to: '\u{f973}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f974}', to: '\u{f974}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f975}', to: '\u{f975}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f976}', to: '\u{f976}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f977}', to: '\u{f977}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f978}', to: '\u{f978}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f979}', to: '\u{f979}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f97a}', to: '\u{f97a}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f97b}', to: '\u{f97b}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f97c}', to: '\u{f97c}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f97d}', to: '\u{f97d}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f97e}', to: '\u{f97e}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f97f}', to: '\u{f97f}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f980}', to: '\u{f980}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f981}', to: '\u{f981}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{f982}', to: '\u{f982}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f983}', to: '\u{f983}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f984}', to: 
'\u{f984}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f985}', to: '\u{f985}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f986}', to: '\u{f986}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f987}', to: '\u{f987}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f988}', to: '\u{f988}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f989}', to: '\u{f989}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f98a}', to: '\u{f98a}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 11, byte_len: 3 }) }, + Range { from: '\u{f98b}', to: '\u{f98b}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f98c}', to: '\u{f98c}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f98d}', to: '\u{f98d}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f98e}', to: '\u{f98e}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f98f}', to: '\u{f98f}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f990}', to: '\u{f990}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f991}', to: '\u{f991}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f992}', to: '\u{f992}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f993}', to: '\u{f993}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f994}', to: '\u{f994}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f995}', to: '\u{f995}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f996}', to: '\u{f996}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f997}', to: '\u{f997}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f998}', to: '\u{f998}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f999}', to: '\u{f999}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f99a}', to: '\u{f99a}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f99b}', to: '\u{f99b}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f99c}', to: '\u{f99c}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f99d}', to: '\u{f99d}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f99e}', to: '\u{f99e}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f99f}', to: '\u{f99f}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f9a0}', to: '\u{f9a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f9a1}', to: '\u{f9a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f9a2}', to: '\u{f9a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f9a3}', to: '\u{f9a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f9a4}', to: '\u{f9a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f9a5}', to: '\u{f9a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9a6}', to: '\u{f9a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9a7}', to: '\u{f9a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9a8}', to: '\u{f9a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9a9}', to: '\u{f9a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9aa}', to: '\u{f9aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f9ab}', to: '\u{f9ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ac}', to: '\u{f9ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ad}', to: '\u{f9ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ae}', to: '\u{f9ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9af}', to: '\u{f9af}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b0}', to: '\u{f9b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b1}', to: '\u{f9b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b2}', to: '\u{f9b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b3}', to: '\u{f9b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b4}', to: '\u{f9b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b5}', to: '\u{f9b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b6}', to: '\u{f9b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b7}', to: '\u{f9b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b8}', to: '\u{f9b8}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 55, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9b9}', to: '\u{f9b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ba}', to: '\u{f9ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9bb}', to: '\u{f9bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9bc}', to: '\u{f9bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9bd}', to: '\u{f9bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9be}', to: '\u{f9be}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9bf}', to: '\u{f9bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{f9c0}', to: '\u{f9c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9c1}', to: '\u{f9c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9c2}', to: '\u{f9c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9c3}', to: '\u{f9c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9c4}', to: '\u{f9c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{f9c5}', to: '\u{f9c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9c6}', to: '\u{f9c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9c7}', to: '\u{f9c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9c8}', to: '\u{f9c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9c9}', to: '\u{f9c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ca}', to: '\u{f9ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9cb}', to: '\u{f9cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9cc}', to: '\u{f9cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9cd}', to: '\u{f9cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ce}', to: '\u{f9ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9cf}', to: '\u{f9cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9d0}', to: '\u{f9d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9d1}', to: '\u{f9d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{f9d2}', to: '\u{f9d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 
28, byte_len: 3 }) }, + Range { from: '\u{f9d3}', to: '\u{f9d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9d4}', to: '\u{f9d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9d5}', to: '\u{f9d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9d6}', to: '\u{f9d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9d7}', to: '\u{f9d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9d8}', to: '\u{f9d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9d9}', to: '\u{f9d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9da}', to: '\u{f9da}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9db}', to: '\u{f9db}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{f9dc}', to: '\u{f9dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9dd}', to: '\u{f9dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9de}', to: '\u{f9de}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9df}', to: '\u{f9df}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e0}', to: '\u{f9e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e1}', to: '\u{f9e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e2}', to: '\u{f9e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e3}', to: '\u{f9e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e4}', to: '\u{f9e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e5}', to: '\u{f9e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e6}', to: '\u{f9e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e7}', to: '\u{f9e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e8}', to: '\u{f9e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9e9}', to: '\u{f9e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{f9ea}', to: '\u{f9ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9eb}', to: '\u{f9eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ec}', to: '\u{f9ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 28, byte_len: 3 }) 
}, + Range { from: '\u{f9ed}', to: '\u{f9ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ee}', to: '\u{f9ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ef}', to: '\u{f9ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f0}', to: '\u{f9f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f1}', to: '\u{f9f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f2}', to: '\u{f9f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f3}', to: '\u{f9f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f4}', to: '\u{f9f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f5}', to: '\u{f9f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f6}', to: '\u{f9f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f7}', to: '\u{f9f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{f9f8}', to: '\u{f9f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9f9}', to: '\u{f9f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9fa}', to: '\u{f9fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9fb}', to: '\u{f9fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9fc}', to: '\u{f9fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9fd}', to: '\u{f9fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9fe}', to: '\u{f9fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{f9ff}', to: '\u{f9ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{fa00}', to: '\u{fa00}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{fa01}', to: '\u{fa01}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa02}', to: '\u{fa02}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa03}', to: '\u{fa03}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa04}', to: '\u{fa04}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa05}', to: '\u{fa05}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa06}', to: '\u{fa06}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: 
'\u{fa07}', to: '\u{fa07}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa08}', to: '\u{fa08}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{fa09}', to: '\u{fa09}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa0a}', to: '\u{fa0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{fa0b}', to: '\u{fa0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa0c}', to: '\u{fa0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa0d}', to: '\u{fa0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa0e}', to: '\u{fa0f}', mapping: Valid }, + Range { from: '\u{fa10}', to: '\u{fa10}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa11}', to: '\u{fa11}', mapping: Valid }, + Range { from: '\u{fa12}', to: '\u{fa12}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa13}', to: '\u{fa14}', mapping: Valid }, + Range { from: '\u{fa15}', to: '\u{fa15}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa16}', to: '\u{fa16}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa17}', to: '\u{fa17}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa18}', to: '\u{fa18}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa19}', to: '\u{fa19}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa1a}', to: '\u{fa1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa1b}', to: '\u{fa1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa1c}', to: '\u{fa1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa1d}', to: '\u{fa1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa1e}', to: '\u{fa1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{fa1f}', to: '\u{fa1f}', mapping: Valid }, + Range { from: '\u{fa20}', to: '\u{fa20}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa21}', to: '\u{fa21}', mapping: Valid }, + Range { from: '\u{fa22}', to: '\u{fa22}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa23}', to: '\u{fa24}', mapping: Valid }, + Range { from: '\u{fa25}', to: '\u{fa25}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa26}', to: '\u{fa26}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa27}', to: '\u{fa29}', mapping: Valid }, + Range { from: '\u{fa2a}', 
to: '\u{fa2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa2b}', to: '\u{fa2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa2c}', to: '\u{fa2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa2d}', to: '\u{fa2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa2e}', to: '\u{fa2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa2f}', to: '\u{fa2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa30}', to: '\u{fa30}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa31}', to: '\u{fa31}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa32}', to: '\u{fa32}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa33}', to: '\u{fa33}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa34}', to: '\u{fa34}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa35}', to: '\u{fa35}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa36}', to: '\u{fa36}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa37}', to: '\u{fa37}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa38}', to: '\u{fa38}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa39}', to: '\u{fa39}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa3a}', to: '\u{fa3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa3b}', to: '\u{fa3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa3c}', to: '\u{fa3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{fa3d}', to: '\u{fa3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa3e}', to: '\u{fa3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa3f}', to: '\u{fa3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa40}', to: '\u{fa40}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa41}', to: '\u{fa41}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa42}', to: '\u{fa42}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa43}', to: '\u{fa43}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa44}', to: '\u{fa44}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa45}', to: '\u{fa45}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa46}', to: '\u{fa46}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa47}', to: '\u{fa47}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa48}', to: '\u{fa48}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa49}', to: '\u{fa49}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa4a}', to: '\u{fa4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa4b}', to: '\u{fa4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa4c}', to: '\u{fa4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{fa4d}', to: '\u{fa4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa4e}', to: '\u{fa4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa4f}', to: '\u{fa4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa50}', to: '\u{fa50}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa51}', to: '\u{fa51}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 17, byte_len: 3 }) }, + Range { from: '\u{fa52}', to: '\u{fa52}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa53}', to: '\u{fa53}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa54}', to: '\u{fa54}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa55}', to: '\u{fa55}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa56}', to: '\u{fa56}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa57}', to: '\u{fa57}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{fa58}', to: '\u{fa58}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa59}', to: '\u{fa59}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa5a}', to: '\u{fa5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa5b}', to: '\u{fa5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa5c}', to: '\u{fa5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa5d}', to: '\u{fa5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa5f}', to: '\u{fa5f}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa60}', to: '\u{fa60}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa61}', to: '\u{fa61}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa62}', to: '\u{fa62}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa63}', to: '\u{fa63}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa64}', to: '\u{fa64}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa65}', to: '\u{fa65}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa66}', to: '\u{fa66}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa67}', to: '\u{fa67}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa68}', to: '\u{fa68}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa69}', to: '\u{fa69}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa6a}', to: '\u{fa6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa6b}', to: '\u{fa6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa6c}', to: '\u{fa6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 30, byte_len: 4 }) }, + Range { from: '\u{fa6d}', to: '\u{fa6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa6e}', to: '\u{fa6f}', mapping: Disallowed }, + Range { from: '\u{fa70}', to: '\u{fa70}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa71}', to: '\u{fa71}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa72}', to: '\u{fa72}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa73}', to: '\u{fa73}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa74}', to: '\u{fa74}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa75}', to: '\u{fa75}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa76}', to: '\u{fa76}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa77}', to: '\u{fa77}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa78}', to: '\u{fa78}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa79}', to: '\u{fa79}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa7a}', to: '\u{fa7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: 
'\u{fa7b}', to: '\u{fa7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa7c}', to: '\u{fa7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa7d}', to: '\u{fa7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa7e}', to: '\u{fa7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa7f}', to: '\u{fa7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa80}', to: '\u{fa80}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa81}', to: '\u{fa81}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa82}', to: '\u{fa82}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa83}', to: '\u{fa83}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa84}', to: '\u{fa84}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa85}', to: '\u{fa85}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa86}', to: '\u{fa86}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa87}', to: '\u{fa87}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa88}', to: '\u{fa88}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa89}', to: '\u{fa89}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa8a}', to: '\u{fa8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa8b}', to: '\u{fa8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa8c}', to: '\u{fa8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa8d}', to: '\u{fa8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa8e}', to: '\u{fa8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa8f}', to: '\u{fa8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa90}', to: '\u{fa90}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa91}', to: '\u{fa91}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa92}', to: '\u{fa92}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{fa93}', to: '\u{fa93}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa94}', to: '\u{fa94}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa95}', to: '\u{fa95}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{fa96}', to: '\u{fa96}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{fa97}', to: '\u{fa97}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{fa98}', to: '\u{fa98}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa99}', to: '\u{fa99}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa9a}', to: '\u{fa9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa9b}', to: '\u{fa9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa9c}', to: '\u{fa9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fa9d}', to: '\u{fa9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa9e}', to: '\u{fa9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fa9f}', to: '\u{fa9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faa0}', to: '\u{faa0}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{faa1}', to: '\u{faa1}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faa2}', to: '\u{faa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faa3}', to: '\u{faa3}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faa4}', to: '\u{faa4}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faa5}', to: '\u{faa5}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faa6}', to: '\u{faa6}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{faa7}', to: '\u{faa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faa8}', to: '\u{faa8}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faa9}', to: '\u{faa9}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faaa}', to: '\u{faaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faab}', to: '\u{faab}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faac}', to: '\u{faac}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faad}', to: '\u{faad}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{faae}', to: '\u{faae}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faaf}', to: '\u{faaf}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fab0}', to: '\u{fab0}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{fab1}', to: '\u{fab1}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fab2}', to: '\u{fab2}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fab3}', to: '\u{fab3}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fab4}', to: '\u{fab4}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fab5}', to: '\u{fab5}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fab6}', to: '\u{fab6}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fab7}', to: '\u{fab7}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fab8}', to: '\u{fab8}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fab9}', to: '\u{fab9}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faba}', to: '\u{faba}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fabb}', to: '\u{fabb}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fabc}', to: '\u{fabc}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fabd}', to: '\u{fabd}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{fabe}', to: '\u{fabe}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fabf}', to: '\u{fabf}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fac0}', to: '\u{fac0}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fac1}', to: '\u{fac1}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fac2}', to: '\u{fac2}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fac3}', to: '\u{fac3}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fac4}', to: '\u{fac4}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fac5}', to: '\u{fac5}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fac6}', to: '\u{fac6}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fac7}', to: '\u{fac7}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fac8}', to: '\u{fac8}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{fac9}', to: '\u{fac9}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{faca}', to: '\u{faca}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{facb}', to: '\u{facb}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{facc}', to: '\u{facc}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{facd}', to: '\u{facd}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{face}', to: '\u{face}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{facf}', to: '\u{facf}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 30, byte_len: 4 }) }, + Range { from: '\u{fad0}', to: '\u{fad0}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 30, byte_len: 4 }) }, + Range { from: '\u{fad1}', to: '\u{fad1}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 30, byte_len: 4 }) }, + Range { from: '\u{fad2}', to: '\u{fad2}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fad3}', to: '\u{fad3}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fad4}', to: '\u{fad4}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fad5}', to: '\u{fad5}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 30, byte_len: 4 }) }, + Range { from: '\u{fad6}', to: '\u{fad6}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 30, byte_len: 4 }) }, + Range { from: '\u{fad7}', to: '\u{fad7}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 30, byte_len: 4 }) }, + Range { from: '\u{fad8}', to: '\u{fad8}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fad9}', to: '\u{fad9}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{fada}', to: '\u{faff}', mapping: Disallowed }, + Range { from: '\u{fb00}', to: '\u{fb00}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 30, byte_len: 2 }) }, + Range { from: '\u{fb01}', to: '\u{fb01}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 30, byte_len: 2 }) }, + Range { from: '\u{fb02}', to: '\u{fb02}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb03}', to: '\u{fb03}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 31, byte_len: 3 }) }, + Range { from: '\u{fb04}', to: '\u{fb04}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 31, byte_len: 3 }) }, + Range { from: '\u{fb05}', to: '\u{fb06}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb07}', to: '\u{fb12}', mapping: Disallowed }, + Range { from: '\u{fb13}', to: '\u{fb13}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb14}', to: '\u{fb14}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb15}', to: '\u{fb15}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 18, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb16}', to: '\u{fb16}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb17}', to: '\u{fb17}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb18}', to: '\u{fb1c}', mapping: Disallowed }, + Range { from: '\u{fb1d}', to: '\u{fb1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb1e}', to: '\u{fb1e}', mapping: Valid }, + Range { from: '\u{fb1f}', to: '\u{fb1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb20}', to: '\u{fb20}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb21}', to: '\u{fb21}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 8, byte_len: 2 }) }, + Range { from: '\u{fb22}', to: '\u{fb22}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 8, byte_len: 2 }) }, + Range { from: '\u{fb23}', to: '\u{fb23}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb24}', to: '\u{fb24}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb25}', to: '\u{fb25}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb26}', to: '\u{fb26}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb27}', to: '\u{fb27}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb28}', to: '\u{fb28}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb29}', to: '\u{fb29}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) }, + Range { from: '\u{fb2a}', to: '\u{fb2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb2b}', to: '\u{fb2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb2c}', to: '\u{fb2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 31, byte_len: 6 }) }, + Range { from: '\u{fb2d}', to: '\u{fb2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 31, byte_len: 6 }) }, + Range { from: '\u{fb2e}', to: '\u{fb2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb2f}', to: '\u{fb2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb30}', to: '\u{fb30}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb31}', to: '\u{fb31}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb32}', to: '\u{fb32}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb33}', to: '\u{fb33}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb34}', to: '\u{fb34}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 
31, byte_len: 4 }) }, + Range { from: '\u{fb35}', to: '\u{fb35}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb36}', to: '\u{fb36}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb37}', to: '\u{fb37}', mapping: Disallowed }, + Range { from: '\u{fb38}', to: '\u{fb38}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb39}', to: '\u{fb39}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb3a}', to: '\u{fb3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb3b}', to: '\u{fb3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb3c}', to: '\u{fb3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb3d}', to: '\u{fb3d}', mapping: Disallowed }, + Range { from: '\u{fb3e}', to: '\u{fb3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb3f}', to: '\u{fb3f}', mapping: Disallowed }, + Range { from: '\u{fb40}', to: '\u{fb40}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb41}', to: '\u{fb41}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb42}', to: '\u{fb42}', mapping: Disallowed }, + Range { from: '\u{fb43}', to: '\u{fb43}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb44}', to: '\u{fb44}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb45}', to: '\u{fb45}', mapping: Disallowed }, + Range { from: '\u{fb46}', to: '\u{fb46}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb47}', to: '\u{fb47}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb48}', to: '\u{fb48}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb49}', to: '\u{fb49}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb4a}', to: '\u{fb4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb4b}', to: '\u{fb4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb4c}', to: '\u{fb4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb4d}', to: '\u{fb4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb4e}', to: '\u{fb4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb4f}', to: '\u{fb4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 31, byte_len: 4 }) }, + Range { from: '\u{fb50}', to: '\u{fb51}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb52}', to: '\u{fb55}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb56}', to: '\u{fb59}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb5a}', to: '\u{fb5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb5e}', to: '\u{fb61}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb62}', to: '\u{fb65}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb66}', to: '\u{fb69}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb6a}', to: '\u{fb6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb6e}', to: '\u{fb71}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb72}', to: '\u{fb75}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb76}', to: '\u{fb79}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb7a}', to: '\u{fb7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb7e}', to: '\u{fb81}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb82}', to: '\u{fb83}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb84}', to: '\u{fb85}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb86}', to: '\u{fb87}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb88}', to: '\u{fb89}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb8a}', to: '\u{fb8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb8c}', to: '\u{fb8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb8e}', to: '\u{fb91}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb92}', to: '\u{fb95}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb96}', to: '\u{fb99}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb9a}', to: '\u{fb9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fb9e}', to: '\u{fb9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fba0}', to: '\u{fba3}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fba4}', to: '\u{fba5}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fba6}', to: '\u{fba9}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fbaa}', to: '\u{fbad}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fbae}', to: '\u{fbaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fbb0}', to: '\u{fbb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fbb2}', to: '\u{fbc1}', mapping: Valid }, + Range { from: '\u{fbc2}', to: '\u{fbd2}', mapping: Disallowed }, + Range { from: '\u{fbd3}', to: '\u{fbd6}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fbd7}', to: '\u{fbd8}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fbd9}', to: '\u{fbda}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fbdb}', to: '\u{fbdc}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 31, byte_len: 2 }) }, + Range { from: '\u{fbdd}', to: '\u{fbdd}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 3, byte_len: 4 }) }, + Range { from: '\u{fbde}', to: '\u{fbdf}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 32, byte_len: 2 }) }, + Range { from: '\u{fbe0}', to: '\u{fbe1}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 32, byte_len: 2 }) }, + Range { from: '\u{fbe2}', to: '\u{fbe3}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 32, byte_len: 2 }) }, + Range { from: '\u{fbe4}', to: '\u{fbe7}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 32, byte_len: 2 }) }, + Range { from: '\u{fbe8}', to: '\u{fbe9}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 32, byte_len: 2 }) }, + Range { from: '\u{fbea}', to: '\u{fbeb}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fbec}', to: '\u{fbed}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fbee}', to: '\u{fbef}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fbf0}', to: '\u{fbf1}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fbf2}', to: '\u{fbf3}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fbf4}', to: '\u{fbf5}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fbf6}', to: '\u{fbf8}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fbf9}', to: '\u{fbfb}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fbfc}', to: '\u{fbff}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 32, byte_len: 2 }) }, + Range { from: '\u{fc00}', to: '\u{fc00}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc01}', to: '\u{fc01}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc02}', to: '\u{fc02}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc03}', to: '\u{fc03}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, 
byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc04}', to: '\u{fc04}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc05}', to: '\u{fc05}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc06}', to: '\u{fc06}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc07}', to: '\u{fc07}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc08}', to: '\u{fc08}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc09}', to: '\u{fc09}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc0a}', to: '\u{fc0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc0b}', to: '\u{fc0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc0c}', to: '\u{fc0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc0d}', to: '\u{fc0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc0e}', to: '\u{fc0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc0f}', to: '\u{fc0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc10}', to: '\u{fc10}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc11}', to: '\u{fc11}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc12}', to: '\u{fc12}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc13}', to: '\u{fc13}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc14}', to: '\u{fc14}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc15}', to: '\u{fc15}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc16}', to: '\u{fc16}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc17}', to: '\u{fc17}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc18}', to: '\u{fc18}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc19}', to: '\u{fc19}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc1a}', to: '\u{fc1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc1b}', to: '\u{fc1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc1c}', to: '\u{fc1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc1d}', to: '\u{fc1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 
4 }) }, + Range { from: '\u{fc1e}', to: '\u{fc1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc1f}', to: '\u{fc1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc20}', to: '\u{fc20}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc21}', to: '\u{fc21}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc22}', to: '\u{fc22}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc23}', to: '\u{fc23}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc24}', to: '\u{fc24}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc25}', to: '\u{fc25}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc26}', to: '\u{fc26}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc27}', to: '\u{fc27}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc28}', to: '\u{fc28}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc29}', to: '\u{fc29}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc2a}', to: '\u{fc2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc2b}', to: '\u{fc2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc2c}', to: '\u{fc2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc2d}', to: '\u{fc2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc2e}', to: '\u{fc2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc2f}', to: '\u{fc2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc30}', to: '\u{fc30}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc31}', to: '\u{fc31}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc32}', to: '\u{fc32}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc33}', to: '\u{fc33}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc34}', to: '\u{fc34}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc35}', to: '\u{fc35}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc36}', to: '\u{fc36}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc37}', to: '\u{fc37}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 33, byte_len: 4 }) }, + Range { 
from: '\u{fc38}', to: '\u{fc38}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc39}', to: '\u{fc39}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc3a}', to: '\u{fc3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc3b}', to: '\u{fc3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc3c}', to: '\u{fc3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc3d}', to: '\u{fc3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc3e}', to: '\u{fc3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc3f}', to: '\u{fc3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc40}', to: '\u{fc40}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc41}', to: '\u{fc41}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc42}', to: '\u{fc42}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc43}', to: '\u{fc43}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc44}', to: '\u{fc44}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc45}', to: '\u{fc45}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc46}', to: '\u{fc46}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc47}', to: '\u{fc47}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc48}', to: '\u{fc48}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc49}', to: '\u{fc49}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc4a}', to: '\u{fc4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc4b}', to: '\u{fc4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc4c}', to: '\u{fc4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc4d}', to: '\u{fc4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc4e}', to: '\u{fc4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc4f}', to: '\u{fc4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc50}', to: '\u{fc50}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc51}', to: '\u{fc51}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc52}', to: '\u{fc52}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc53}', to: '\u{fc53}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc54}', to: '\u{fc54}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc55}', to: '\u{fc55}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc56}', to: '\u{fc56}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc57}', to: '\u{fc57}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc58}', to: '\u{fc58}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc59}', to: '\u{fc59}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc5a}', to: '\u{fc5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc5b}', to: '\u{fc5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc5c}', to: '\u{fc5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc5d}', to: '\u{fc5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc5e}', to: '\u{fc5e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 33, byte_len: 5 }) }, + Range { from: '\u{fc5f}', to: '\u{fc5f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 33, byte_len: 5 }) }, + Range { from: '\u{fc60}', to: '\u{fc60}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 33, byte_len: 5 }) }, + Range { from: '\u{fc61}', to: '\u{fc61}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 33, byte_len: 5 }) }, + Range { from: '\u{fc62}', to: '\u{fc62}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 33, byte_len: 5 }) }, + Range { from: '\u{fc63}', to: '\u{fc63}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 33, byte_len: 5 }) }, + Range { from: '\u{fc64}', to: '\u{fc64}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc65}', to: '\u{fc65}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc66}', to: '\u{fc66}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc67}', to: '\u{fc67}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc68}', to: '\u{fc68}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc69}', to: '\u{fc69}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc6a}', to: '\u{fc6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc6b}', to: '\u{fc6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 33, 
byte_len: 4 }) }, + Range { from: '\u{fc6c}', to: '\u{fc6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc6d}', to: '\u{fc6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc6e}', to: '\u{fc6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc6f}', to: '\u{fc6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc70}', to: '\u{fc70}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc71}', to: '\u{fc71}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc72}', to: '\u{fc72}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc73}', to: '\u{fc73}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc74}', to: '\u{fc74}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc75}', to: '\u{fc75}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc76}', to: '\u{fc76}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc77}', to: '\u{fc77}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc78}', to: '\u{fc78}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc79}', to: '\u{fc79}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc7a}', to: '\u{fc7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc7b}', to: '\u{fc7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc7c}', to: '\u{fc7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc7d}', to: '\u{fc7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc7e}', to: '\u{fc7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc7f}', to: '\u{fc7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc80}', to: '\u{fc80}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc81}', to: '\u{fc81}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc82}', to: '\u{fc82}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc83}', to: '\u{fc83}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc84}', to: '\u{fc84}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc85}', to: '\u{fc85}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }) }, + Range { 
from: '\u{fc86}', to: '\u{fc86}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc87}', to: '\u{fc87}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc88}', to: '\u{fc88}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc89}', to: '\u{fc89}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc8a}', to: '\u{fc8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc8b}', to: '\u{fc8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc8c}', to: '\u{fc8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc8d}', to: '\u{fc8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc8e}', to: '\u{fc8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc8f}', to: '\u{fc8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc90}', to: '\u{fc90}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc91}', to: '\u{fc91}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc92}', to: '\u{fc92}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fc93}', to: '\u{fc93}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc94}', to: '\u{fc94}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fc95}', to: '\u{fc95}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc96}', to: '\u{fc96}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fc97}', to: '\u{fc97}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc98}', to: '\u{fc98}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc99}', to: '\u{fc99}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fc9a}', to: '\u{fc9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc9b}', to: '\u{fc9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fc9c}', to: '\u{fc9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc9d}', to: '\u{fc9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc9e}', to: '\u{fc9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fc9f}', to: '\u{fc9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fca0}', to: '\u{fca0}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fca1}', to: '\u{fca1}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fca2}', to: '\u{fca2}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fca3}', to: '\u{fca3}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fca4}', to: '\u{fca4}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fca5}', to: '\u{fca5}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fca6}', to: '\u{fca6}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fca7}', to: '\u{fca7}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fca8}', to: '\u{fca8}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fca9}', to: '\u{fca9}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcaa}', to: '\u{fcaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcab}', to: '\u{fcab}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcac}', to: '\u{fcac}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcad}', to: '\u{fcad}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcae}', to: '\u{fcae}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcaf}', to: '\u{fcaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb0}', to: '\u{fcb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb1}', to: '\u{fcb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb2}', to: '\u{fcb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcb3}', to: '\u{fcb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb4}', to: '\u{fcb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb5}', to: '\u{fcb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb6}', to: '\u{fcb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb7}', to: '\u{fcb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb8}', to: '\u{fcb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcb9}', to: '\u{fcb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcba}', to: '\u{fcba}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcbb}', to: '\u{fcbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcbc}', to: '\u{fcbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcbd}', to: '\u{fcbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcbe}', to: '\u{fcbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcbf}', to: '\u{fcbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcc0}', to: '\u{fcc0}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcc1}', to: '\u{fcc1}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcc2}', to: '\u{fcc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcc3}', to: '\u{fcc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fcc4}', to: '\u{fcc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcc5}', to: '\u{fcc5}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcc6}', to: '\u{fcc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcc7}', to: '\u{fcc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcc8}', to: '\u{fcc8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcc9}', to: '\u{fcc9}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcca}', to: '\u{fcca}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fccb}', to: '\u{fccb}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fccc}', to: '\u{fccc}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fccd}', to: '\u{fccd}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcce}', to: '\u{fcce}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fccf}', to: '\u{fccf}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd0}', to: '\u{fcd0}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd1}', to: '\u{fcd1}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd2}', to: '\u{fcd2}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd3}', to: '\u{fcd3}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd4}', to: '\u{fcd4}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 92, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd5}', to: '\u{fcd5}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd6}', to: '\u{fcd6}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcd7}', to: '\u{fcd7}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd8}', to: '\u{fcd8}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcd9}', to: '\u{fcd9}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcda}', to: '\u{fcda}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcdb}', to: '\u{fcdb}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcdc}', to: '\u{fcdc}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcdd}', to: '\u{fcdd}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcde}', to: '\u{fcde}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcdf}', to: '\u{fcdf}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fce0}', to: '\u{fce0}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fce1}', to: '\u{fce1}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fce2}', to: '\u{fce2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fce3}', to: '\u{fce3}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fce4}', to: '\u{fce4}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fce5}', to: '\u{fce5}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fce6}', to: '\u{fce6}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fce7}', to: '\u{fce7}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }) }, + Range { from: '\u{fce8}', to: '\u{fce8}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fce9}', to: '\u{fce9}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcea}', to: '\u{fcea}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fceb}', to: '\u{fceb}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcec}', to: '\u{fcec}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fced}', to: '\u{fced}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcee}', to: '\u{fcee}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 
33, byte_len: 4 }) }, + Range { from: '\u{fcef}', to: '\u{fcef}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcf0}', to: '\u{fcf0}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }) }, + Range { from: '\u{fcf1}', to: '\u{fcf1}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcf2}', to: '\u{fcf2}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 34, byte_len: 6 }) }, + Range { from: '\u{fcf3}', to: '\u{fcf3}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 34, byte_len: 6 }) }, + Range { from: '\u{fcf4}', to: '\u{fcf4}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 34, byte_len: 6 }) }, + Range { from: '\u{fcf5}', to: '\u{fcf5}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcf6}', to: '\u{fcf6}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcf7}', to: '\u{fcf7}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcf8}', to: '\u{fcf8}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcf9}', to: '\u{fcf9}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcfa}', to: '\u{fcfa}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcfb}', to: '\u{fcfb}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcfc}', to: '\u{fcfc}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcfd}', to: '\u{fcfd}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcfe}', to: '\u{fcfe}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fcff}', to: '\u{fcff}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd00}', to: '\u{fd00}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd01}', to: '\u{fd01}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd02}', to: '\u{fd02}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd03}', to: '\u{fd03}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd04}', to: '\u{fd04}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd05}', to: '\u{fd05}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd06}', to: '\u{fd06}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd07}', to: '\u{fd07}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 34, byte_len: 4 }) }, + Range { from: '\u{fd08}', to: '\u{fd08}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 34, byte_len: 4 }) }, + 
Range { from: '\u{fd09}', to: '\u{fd09}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd0a}', to: '\u{fd0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd0b}', to: '\u{fd0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd0c}', to: '\u{fd0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd0d}', to: '\u{fd0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd0e}', to: '\u{fd0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd0f}', to: '\u{fd0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd10}', to: '\u{fd10}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd11}', to: '\u{fd11}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd12}', to: '\u{fd12}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd13}', to: '\u{fd13}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd14}', to: '\u{fd14}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd15}', to: '\u{fd15}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd16}', to: '\u{fd16}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd17}', to: '\u{fd17}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd18}', to: '\u{fd18}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd19}', to: '\u{fd19}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd1a}', to: '\u{fd1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd1b}', to: '\u{fd1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd1c}', to: '\u{fd1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd1d}', to: '\u{fd1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd1e}', to: '\u{fd1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd1f}', to: '\u{fd1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd20}', to: '\u{fd20}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd21}', to: '\u{fd21}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd22}', to: '\u{fd22}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd23}', to: '\u{fd23}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd24}', to: '\u{fd24}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd25}', to: '\u{fd25}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd26}', to: '\u{fd26}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd27}', to: '\u{fd27}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd28}', to: '\u{fd28}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd29}', to: '\u{fd29}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd2a}', to: '\u{fd2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd2b}', to: '\u{fd2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd2c}', to: '\u{fd2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd2d}', to: '\u{fd2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd2e}', to: '\u{fd2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd2f}', to: '\u{fd2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd30}', to: '\u{fd30}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd31}', to: '\u{fd31}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd32}', to: '\u{fd32}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd33}', to: '\u{fd33}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 32, byte_len: 4 }) },
+ Range { from: '\u{fd34}', to: '\u{fd34}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }) },
+ Range { from: '\u{fd35}', to: '\u{fd35}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 4 }) },
+ Range { from: '\u{fd36}', to: '\u{fd36}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }) },
+ Range { from: '\u{fd37}', to: '\u{fd37}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd38}', to: '\u{fd38}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd39}', to: '\u{fd39}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd3a}', to: '\u{fd3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 32, byte_len: 4 }) },
+ Range { from: '\u{fd3b}', to: '\u{fd3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }) },
+ Range { from: '\u{fd3c}', to: '\u{fd3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 34, byte_len: 4 }) },
+ Range { from: '\u{fd3e}', to: '\u{fd3f}', mapping: Valid },
+ Range { from: '\u{fd40}', to: '\u{fd4f}', mapping: Disallowed },
+ Range { from: '\u{fd50}', to: '\u{fd50}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd51}', to: '\u{fd52}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd53}', to: '\u{fd53}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd54}', to: '\u{fd54}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd55}', to: '\u{fd55}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd56}', to: '\u{fd56}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd57}', to: '\u{fd57}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd58}', to: '\u{fd59}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd5a}', to: '\u{fd5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd5b}', to: '\u{fd5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd5c}', to: '\u{fd5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 34, byte_len: 6 }) },
+ Range { from: '\u{fd5d}', to: '\u{fd5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd5e}', to: '\u{fd5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd5f}', to: '\u{fd60}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd61}', to: '\u{fd61}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd62}', to: '\u{fd63}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd64}', to: '\u{fd65}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd66}', to: '\u{fd66}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd67}', to: '\u{fd68}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd69}', to: '\u{fd69}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd6a}', to: '\u{fd6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd6c}', to: '\u{fd6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd6e}', to: '\u{fd6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd6f}', to: '\u{fd70}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd71}', to: '\u{fd72}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd73}', to: '\u{fd73}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd74}', to: '\u{fd74}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd75}', to: '\u{fd75}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd76}', to: '\u{fd77}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd78}', to: '\u{fd78}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd79}', to: '\u{fd79}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd7a}', to: '\u{fd7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd7b}', to: '\u{fd7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd7c}', to: '\u{fd7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd7e}', to: '\u{fd7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd7f}', to: '\u{fd7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd80}', to: '\u{fd80}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd81}', to: '\u{fd81}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd82}', to: '\u{fd82}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd83}', to: '\u{fd84}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd85}', to: '\u{fd86}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd87}', to: '\u{fd88}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd89}', to: '\u{fd89}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd8a}', to: '\u{fd8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd8b}', to: '\u{fd8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd8c}', to: '\u{fd8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd8d}', to: '\u{fd8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd8e}', to: '\u{fd8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd8f}', to: '\u{fd8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd90}', to: '\u{fd91}', mapping: Disallowed },
+ Range { from: '\u{fd92}', to: '\u{fd92}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd93}', to: '\u{fd93}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd94}', to: '\u{fd94}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd95}', to: '\u{fd95}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd96}', to: '\u{fd96}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fd97}', to: '\u{fd98}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fd99}', to: '\u{fd99}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fd9a}', to: '\u{fd9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fd9b}', to: '\u{fd9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fd9c}', to: '\u{fd9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fd9e}', to: '\u{fd9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fd9f}', to: '\u{fd9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda0}', to: '\u{fda0}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda1}', to: '\u{fda1}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda2}', to: '\u{fda2}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda3}', to: '\u{fda3}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda4}', to: '\u{fda4}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda5}', to: '\u{fda5}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda6}', to: '\u{fda6}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda7}', to: '\u{fda7}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda8}', to: '\u{fda8}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fda9}', to: '\u{fda9}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdaa}', to: '\u{fdaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdab}', to: '\u{fdab}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdac}', to: '\u{fdac}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdad}', to: '\u{fdad}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdae}', to: '\u{fdae}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdaf}', to: '\u{fdaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdb0}', to: '\u{fdb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdb1}', to: '\u{fdb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdb2}', to: '\u{fdb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdb3}', to: '\u{fdb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdb4}', to: '\u{fdb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fdb5}', to: '\u{fdb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fdb6}', to: '\u{fdb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdb7}', to: '\u{fdb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdb8}', to: '\u{fdb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdb9}', to: '\u{fdb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdba}', to: '\u{fdba}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdbb}', to: '\u{fdbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdbc}', to: '\u{fdbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdbd}', to: '\u{fdbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdbe}', to: '\u{fdbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdbf}', to: '\u{fdbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdc0}', to: '\u{fdc0}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdc1}', to: '\u{fdc1}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdc2}', to: '\u{fdc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdc3}', to: '\u{fdc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdc4}', to: '\u{fdc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fdc5}', to: '\u{fdc5}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 35, byte_len: 6 }) },
+ Range { from: '\u{fdc6}', to: '\u{fdc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdc7}', to: '\u{fdc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdc8}', to: '\u{fdef}', mapping: Disallowed },
+ Range { from: '\u{fdf0}', to: '\u{fdf0}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdf1}', to: '\u{fdf1}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 36, byte_len: 6 }) },
+ Range { from: '\u{fdf2}', to: '\u{fdf2}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 37, byte_len: 8 }) },
+ Range { from: '\u{fdf3}', to: '\u{fdf3}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 37, byte_len: 8 }) },
+ Range { from: '\u{fdf4}', to: '\u{fdf4}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 37, byte_len: 8 }) },
+ Range { from: '\u{fdf5}', to: '\u{fdf5}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 37, byte_len: 8 }) },
+ Range { from: '\u{fdf6}', to: '\u{fdf6}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 37, byte_len: 8 }) },
+ Range { from: '\u{fdf7}', to: '\u{fdf7}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 37, byte_len: 8 }) },
+ Range { from: '\u{fdf8}', to: '\u{fdf8}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 37, byte_len: 8 }) },
+ Range { from: '\u{fdf9}', to: '\u{fdf9}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 37, byte_len: 6 }) },
+ Range { from: '\u{fdfa}', to: '\u{fdfa}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 37, byte_len: 33 }) },
+ Range { from: '\u{fdfb}', to: '\u{fdfb}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 37, byte_len: 15 }) },
+ Range { from: '\u{fdfc}', to: '\u{fdfc}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 37, byte_len: 8 }) },
+ Range { from: '\u{fdfd}', to: '\u{fdfd}', mapping: Valid },
+ Range { from: '\u{fdfe}', to: '\u{fdff}', mapping: Disallowed },
+ Range { from: '\u{fe00}', to: '\u{fe0f}', mapping: Ignored },
+ Range { from: '\u{fe10}', to: '\u{fe10}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe11}', to: '\u{fe11}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe12}', to: '\u{fe12}', mapping: Disallowed },
+ Range { from: '\u{fe13}', to: '\u{fe13}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe14}', to: '\u{fe14}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }) },
+ Range { from: '\u{fe15}', to: '\u{fe15}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe16}', to: '\u{fe16}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe17}', to: '\u{fe17}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe18}', to: '\u{fe18}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe19}', to: '\u{fe1f}', mapping: Disallowed },
+ Range { from: '\u{fe20}', to: '\u{fe2f}', mapping: Valid },
+ Range { from: '\u{fe30}', to: '\u{fe30}', mapping: Disallowed },
+ Range { from: '\u{fe31}', to: '\u{fe31}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe32}', to: '\u{fe32}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe33}', to: '\u{fe34}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe35}', to: '\u{fe35}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{fe36}', to: '\u{fe36}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{fe37}', to: '\u{fe37}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe38}', to: '\u{fe38}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe39}', to: '\u{fe39}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe3a}', to: '\u{fe3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe3b}', to: '\u{fe3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe3c}', to: '\u{fe3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe3d}', to: '\u{fe3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe3e}', to: '\u{fe3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe3f}', to: '\u{fe3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{fe40}', to: '\u{fe40}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 9, byte_len: 3 }) },
+ Range { from: '\u{fe41}', to: '\u{fe41}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe42}', to: '\u{fe42}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe43}', to: '\u{fe43}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe44}', to: '\u{fe44}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe45}', to: '\u{fe46}', mapping: Valid },
+ Range { from: '\u{fe47}', to: '\u{fe47}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe48}', to: '\u{fe48}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe49}', to: '\u{fe4c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 8, byte_len: 3 }) },
+ Range { from: '\u{fe4d}', to: '\u{fe4f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe50}', to: '\u{fe50}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe51}', to: '\u{fe51}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe52}', to: '\u{fe53}', mapping: Disallowed },
+ Range { from: '\u{fe54}', to: '\u{fe54}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }) },
+ Range { from: '\u{fe55}', to: '\u{fe55}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe56}', to: '\u{fe56}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe57}', to: '\u{fe57}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe58}', to: '\u{fe58}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe59}', to: '\u{fe59}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{fe5a}', to: '\u{fe5a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{fe5b}', to: '\u{fe5b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe5c}', to: '\u{fe5c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe5d}', to: '\u{fe5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe5e}', to: '\u{fe5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe5f}', to: '\u{fe5f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe60}', to: '\u{fe60}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe61}', to: '\u{fe61}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe62}', to: '\u{fe62}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{fe63}', to: '\u{fe63}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe64}', to: '\u{fe64}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe65}', to: '\u{fe65}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe66}', to: '\u{fe66}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{fe67}', to: '\u{fe67}', mapping: Disallowed },
+ Range { from: '\u{fe68}', to: '\u{fe68}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe69}', to: '\u{fe69}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe6a}', to: '\u{fe6a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe6b}', to: '\u{fe6b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{fe6c}', to: '\u{fe6f}', mapping: Disallowed },
+ Range { from: '\u{fe70}', to: '\u{fe70}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe71}', to: '\u{fe71}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 37, byte_len: 4 }) },
+ Range { from: '\u{fe72}', to: '\u{fe72}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe73}', to: '\u{fe73}', mapping: Valid },
+ Range { from: '\u{fe74}', to: '\u{fe74}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe75}', to: '\u{fe75}', mapping: Disallowed },
+ Range { from: '\u{fe76}', to: '\u{fe76}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe77}', to: '\u{fe77}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 37, byte_len: 4 }) },
+ Range { from: '\u{fe78}', to: '\u{fe78}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe79}', to: '\u{fe79}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 37, byte_len: 4 }) },
+ Range { from: '\u{fe7a}', to: '\u{fe7a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe7b}', to: '\u{fe7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 37, byte_len: 4 }) },
+ Range { from: '\u{fe7c}', to: '\u{fe7c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe7d}', to: '\u{fe7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 37, byte_len: 4 }) },
+ Range { from: '\u{fe7e}', to: '\u{fe7e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{fe7f}', to: '\u{fe7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 37, byte_len: 4 }) },
+ Range { from: '\u{fe80}', to: '\u{fe80}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe81}', to: '\u{fe82}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe83}', to: '\u{fe84}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe85}', to: '\u{fe86}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe87}', to: '\u{fe88}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe89}', to: '\u{fe8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe8d}', to: '\u{fe8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe8f}', to: '\u{fe92}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe93}', to: '\u{fe94}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe95}', to: '\u{fe98}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe99}', to: '\u{fe9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fe9d}', to: '\u{fea0}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fea1}', to: '\u{fea4}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) },
+ Range { from: '\u{fea5}', to: '\u{fea8}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fea9}', to: '\u{feaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{feab}', to: '\u{feac}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fead}', to: '\u{feae}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{feaf}', to: '\u{feb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{feb1}', to: '\u{feb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{feb5}', to: '\u{feb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{feb9}', to: '\u{febc}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{febd}', to: '\u{fec0}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fec1}', to: '\u{fec4}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fec5}', to: '\u{fec8}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fec9}', to: '\u{fecc}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fecd}', to: '\u{fed0}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fed1}', to: '\u{fed4}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fed5}', to: '\u{fed8}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fed9}', to: '\u{fedc}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fedd}', to: '\u{fee0}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fee1}', to: '\u{fee4}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fee5}', to: '\u{fee8}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fee9}', to: '\u{feec}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{feed}', to: '\u{feee}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{feef}', to: '\u{fef0}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 32, byte_len: 2 }) },
+ Range { from: '\u{fef1}', to: '\u{fef4}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{fef5}', to: '\u{fef6}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 38, byte_len: 4 }) },
+ Range { from: '\u{fef7}', to: '\u{fef8}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 38, byte_len: 4 }) },
+ Range { from: '\u{fef9}', to: '\u{fefa}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 38, byte_len: 4 }) },
+ Range { from: '\u{fefb}', to: '\u{fefc}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 38, byte_len: 4 }) },
+ Range { from: '\u{fefd}', to: '\u{fefe}', mapping: Disallowed },
+ Range { from: '\u{feff}', to: '\u{feff}', mapping: Ignored },
+ Range { from: '\u{ff00}', to: '\u{ff00}', mapping: Disallowed },
+ Range { from: '\u{ff01}', to: '\u{ff01}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff02}', to: '\u{ff02}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 38, byte_len: 1 }) },
+ Range { from: '\u{ff03}', to: '\u{ff03}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff04}', to: '\u{ff04}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff05}', to: '\u{ff05}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff06}', to: '\u{ff06}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff07}', to: '\u{ff07}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 38, byte_len: 1 }) },
+ Range { from: '\u{ff08}', to: '\u{ff08}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff09}', to: '\u{ff09}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff0a}', to: '\u{ff0a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff0b}', to: '\u{ff0b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff0c}', to: '\u{ff0c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff0d}', to: '\u{ff0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff0e}', to: '\u{ff0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 14, byte_len: 1 }) },
+ Range { from: '\u{ff0f}', to: '\u{ff0f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 38, byte_len: 1 }) },
+ Range { from: '\u{ff10}', to: '\u{ff10}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff11}', to: '\u{ff11}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff12}', to: '\u{ff12}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff13}', to: '\u{ff13}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff14}', to: '\u{ff14}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff15}', to: '\u{ff15}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff16}', to: '\u{ff16}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff17}', to: '\u{ff17}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff18}', to: '\u{ff18}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff19}', to: '\u{ff19}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff1a}', to: '\u{ff1a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff1b}', to: '\u{ff1b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }) },
+ Range { from: '\u{ff1c}', to: '\u{ff1c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff1d}', to: '\u{ff1d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff1e}', to: '\u{ff1e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff1f}', to: '\u{ff1f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff20}', to: '\u{ff20}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff21}', to: '\u{ff21}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff22}', to: '\u{ff22}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff23}', to: '\u{ff23}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff24}', to: '\u{ff24}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff25}', to: '\u{ff25}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff26}', to: '\u{ff26}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff27}', to: '\u{ff27}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff28}', to: '\u{ff28}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff29}', to: '\u{ff29}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff2a}', to: '\u{ff2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff2b}', to: '\u{ff2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff2c}', to: '\u{ff2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff2d}', to: '\u{ff2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff2e}', to: '\u{ff2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff2f}', to: '\u{ff2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff30}', to: '\u{ff30}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff31}', to: '\u{ff31}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff32}', to: '\u{ff32}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff33}', to: '\u{ff33}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff34}', to: '\u{ff34}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff35}', to: '\u{ff35}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff36}', to: '\u{ff36}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff37}', to: '\u{ff37}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff38}', to: '\u{ff38}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff39}', to: '\u{ff39}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff3a}', to: '\u{ff3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff3b}', to: '\u{ff3b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff3c}', to: '\u{ff3c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff3d}', to: '\u{ff3d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff3e}', to: '\u{ff3e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 38, byte_len: 1 }) },
+ Range { from: '\u{ff3f}', to: '\u{ff3f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff40}', to: '\u{ff40}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 8, byte_len: 1 }) },
+ Range { from: '\u{ff41}', to: '\u{ff41}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff42}', to: '\u{ff42}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff43}', to: '\u{ff43}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff44}', to: '\u{ff44}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff45}', to: '\u{ff45}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff46}', to: '\u{ff46}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff47}', to: '\u{ff47}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff48}', to: '\u{ff48}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff49}', to: '\u{ff49}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff4a}', to: '\u{ff4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff4b}', to: '\u{ff4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff4c}', to: '\u{ff4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff4d}', to: '\u{ff4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff4e}', to: '\u{ff4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff4f}', to: '\u{ff4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff50}', to: '\u{ff50}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff51}', to: '\u{ff51}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff52}', to: '\u{ff52}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff53}', to: '\u{ff53}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff54}', to: '\u{ff54}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff55}', to: '\u{ff55}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff56}', to: '\u{ff56}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff57}', to: '\u{ff57}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff58}', to: '\u{ff58}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff59}', to: '\u{ff59}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff5a}', to: '\u{ff5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{ff5b}', to: '\u{ff5b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff5c}', to: '\u{ff5c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 38, byte_len: 1 }) },
+ Range { from: '\u{ff5d}', to: '\u{ff5d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }) },
+ Range { from: '\u{ff5e}', to: '\u{ff5e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 38, byte_len: 1 }) },
+ Range { from: '\u{ff5f}', to: '\u{ff5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff60}', to: '\u{ff60}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff61}', to: '\u{ff61}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 14, byte_len: 1 }) },
+ Range { from: '\u{ff62}', to: '\u{ff62}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{ff63}', to: '\u{ff63}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{ff64}', to: '\u{ff64}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 }) },
+ Range { from: '\u{ff65}', to: '\u{ff65}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff66}', to: '\u{ff66}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff67}', to: '\u{ff67}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff68}', to: '\u{ff68}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff69}', to: '\u{ff69}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff6a}', to: '\u{ff6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff6b}', to: '\u{ff6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff6c}', to: '\u{ff6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff6d}', to: '\u{ff6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff6e}', to: '\u{ff6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff6f}', to: '\u{ff6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff70}', to: '\u{ff70}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff71}', to: '\u{ff71}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff72}', to: '\u{ff72}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff73}', to: '\u{ff73}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff74}', to: '\u{ff74}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff75}', to: '\u{ff75}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff76}', to: '\u{ff76}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff77}', to: '\u{ff77}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff78}', to: '\u{ff78}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff79}', to: '\u{ff79}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff7a}', to: '\u{ff7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff7b}', to: '\u{ff7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff7c}', to: '\u{ff7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff7d}', to: '\u{ff7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff7e}', to: '\u{ff7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff7f}', to: '\u{ff7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff80}', to: '\u{ff80}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff81}', to: '\u{ff81}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff82}', to: '\u{ff82}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 17, byte_len: 3 }) },
+ Range { from: '\u{ff83}', to: '\u{ff83}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff84}', to: '\u{ff84}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff85}', to: '\u{ff85}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff86}', to: '\u{ff86}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff87}', to: '\u{ff87}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff88}', to: '\u{ff88}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff89}', to: '\u{ff89}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff8a}', to: '\u{ff8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff8b}', to: '\u{ff8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff8c}', to: '\u{ff8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff8d}', to: '\u{ff8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff8e}', to: '\u{ff8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff8f}', to: '\u{ff8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff90}', to: '\u{ff90}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff91}', to: '\u{ff91}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff92}', to: '\u{ff92}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff93}', to: '\u{ff93}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff94}', to: '\u{ff94}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff95}', to: '\u{ff95}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff96}', to: '\u{ff96}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff97}', to: '\u{ff97}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff98}', to: '\u{ff98}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff99}', to: '\u{ff99}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff9a}', to: '\u{ff9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff9b}', to: '\u{ff9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff9c}', to: '\u{ff9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 18, byte_len: 3 }) },
+ Range { from: '\u{ff9d}', to: '\u{ff9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff9e}', to: '\u{ff9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ff9f}', to: '\u{ff9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffa0}', to: '\u{ffa0}', mapping: Disallowed },
+ Range { from: '\u{ffa1}', to: '\u{ffa1}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffa2}', to: '\u{ffa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffa3}', to: '\u{ffa3}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffa4}', to: '\u{ffa4}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffa5}', to: '\u{ffa5}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffa6}', to: '\u{ffa6}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffa7}', to: '\u{ffa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffa8}', to: '\u{ffa8}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffa9}', to: '\u{ffa9}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffaa}', to: '\u{ffaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffab}', to: '\u{ffab}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffac}', to: '\u{ffac}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffad}', to: '\u{ffad}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffae}', to: '\u{ffae}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffaf}', to: '\u{ffaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb0}', to: '\u{ffb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb1}', to: '\u{ffb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb2}', to: '\u{ffb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb3}', to: '\u{ffb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb4}', to: '\u{ffb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb5}', to: '\u{ffb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb6}', to: '\u{ffb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb7}', to: '\u{ffb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb8}', to: '\u{ffb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffb9}', to: '\u{ffb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffba}', to: '\u{ffba}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffbb}', to: '\u{ffbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffbc}', to: '\u{ffbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffbd}', to: '\u{ffbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffbe}', to: '\u{ffbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffbf}', to: '\u{ffc1}', mapping: Disallowed },
+ Range { from: '\u{ffc2}', to: '\u{ffc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffc3}', to: '\u{ffc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffc4}', to: '\u{ffc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffc5}', to: '\u{ffc5}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffc6}', to: '\u{ffc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffc7}', to: '\u{ffc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffc8}', to: '\u{ffc9}', mapping: Disallowed },
+ Range { from: '\u{ffca}', to: '\u{ffca}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffcb}', to: '\u{ffcb}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffcc}', to: '\u{ffcc}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffcd}', to: '\u{ffcd}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffce}', to: '\u{ffce}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffcf}', to: '\u{ffcf}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffd0}', to: '\u{ffd1}', mapping: Disallowed },
+ Range { from: '\u{ffd2}', to: '\u{ffd2}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffd3}', to: '\u{ffd3}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffd4}', to: '\u{ffd4}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffd5}', to: '\u{ffd5}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffd6}', to: '\u{ffd6}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffd7}', to: '\u{ffd7}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffd8}', to: '\u{ffd9}', mapping: Disallowed },
+ Range { from: '\u{ffda}', to: '\u{ffda}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffdb}', to: '\u{ffdb}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffdc}', to: '\u{ffdc}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 14, byte_len: 3 }) },
+ Range { from: '\u{ffdd}', to: '\u{ffdf}', mapping: Disallowed },
+ Range { from: '\u{ffe0}', to: '\u{ffe0}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{ffe1}', to: '\u{ffe1}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{ffe2}', to: '\u{ffe2}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{ffe3}', to: '\u{ffe3}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 0, byte_len: 3 }) },
+ Range { from: '\u{ffe4}', to: '\u{ffe4}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{ffe5}', to: '\u{ffe5}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 38, byte_len: 2 }) },
+ Range { from: '\u{ffe6}', to: '\u{ffe6}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffe7}', to: '\u{ffe7}', mapping: Disallowed },
+ Range { from: '\u{ffe8}', to: '\u{ffe8}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffe9}', to: '\u{ffe9}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffea}', to: '\u{ffea}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffeb}', to: '\u{ffeb}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffec}', to: '\u{ffec}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffed}', to: '\u{ffed}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffee}', to: '\u{ffee}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 38, byte_len: 3 }) },
+ Range { from: '\u{ffef}', to: '\u{ffff}', mapping: Disallowed },
+ Range { from: '\u{10000}', to: '\u{1000b}', mapping: Valid },
+ Range { from: '\u{1000c}', to: '\u{1000c}', mapping: Disallowed },
+ Range { from: '\u{1000d}', to: '\u{10026}', mapping: Valid },
+ Range { from: '\u{10027}', to: '\u{10027}', mapping: Disallowed },
+ Range { from: '\u{10028}', to: '\u{1003a}', mapping: Valid },
+ Range { from: '\u{1003b}', to: '\u{1003b}', mapping: Disallowed },
+ Range { from: '\u{1003c}',
to: '\u{1003d}', mapping: Valid }, + Range { from: '\u{1003e}', to: '\u{1003e}', mapping: Disallowed }, + Range { from: '\u{1003f}', to: '\u{1004d}', mapping: Valid }, + Range { from: '\u{1004e}', to: '\u{1004f}', mapping: Disallowed }, + Range { from: '\u{10050}', to: '\u{1005d}', mapping: Valid }, + Range { from: '\u{1005e}', to: '\u{1007f}', mapping: Disallowed }, + Range { from: '\u{10080}', to: '\u{100fa}', mapping: Valid }, + Range { from: '\u{100fb}', to: '\u{100ff}', mapping: Disallowed }, + Range { from: '\u{10100}', to: '\u{10102}', mapping: Valid }, + Range { from: '\u{10103}', to: '\u{10106}', mapping: Disallowed }, + Range { from: '\u{10107}', to: '\u{10133}', mapping: Valid }, + Range { from: '\u{10134}', to: '\u{10136}', mapping: Disallowed }, + Range { from: '\u{10137}', to: '\u{1018e}', mapping: Valid }, + Range { from: '\u{1018f}', to: '\u{1018f}', mapping: Disallowed }, + Range { from: '\u{10190}', to: '\u{1019b}', mapping: Valid }, + Range { from: '\u{1019c}', to: '\u{1019f}', mapping: Disallowed }, + Range { from: '\u{101a0}', to: '\u{101a0}', mapping: Valid }, + Range { from: '\u{101a1}', to: '\u{101cf}', mapping: Disallowed }, + Range { from: '\u{101d0}', to: '\u{101fd}', mapping: Valid }, + Range { from: '\u{101fe}', to: '\u{1027f}', mapping: Disallowed }, + Range { from: '\u{10280}', to: '\u{1029c}', mapping: Valid }, + Range { from: '\u{1029d}', to: '\u{1029f}', mapping: Disallowed }, + Range { from: '\u{102a0}', to: '\u{102d0}', mapping: Valid }, + Range { from: '\u{102d1}', to: '\u{102df}', mapping: Disallowed }, + Range { from: '\u{102e0}', to: '\u{102fb}', mapping: Valid }, + Range { from: '\u{102fc}', to: '\u{102ff}', mapping: Disallowed }, + Range { from: '\u{10300}', to: '\u{10323}', mapping: Valid }, + Range { from: '\u{10324}', to: '\u{1032f}', mapping: Disallowed }, + Range { from: '\u{10330}', to: '\u{1034a}', mapping: Valid }, + Range { from: '\u{1034b}', to: '\u{1034f}', mapping: Disallowed }, + Range { from: '\u{10350}', to: '\u{1037a}', mapping: Valid }, + Range { from: '\u{1037b}', to: '\u{1037f}', mapping: Disallowed }, + Range { from: '\u{10380}', to: '\u{1039d}', mapping: Valid }, + Range { from: '\u{1039e}', to: '\u{1039e}', mapping: Disallowed }, + Range { from: '\u{1039f}', to: '\u{103c3}', mapping: Valid }, + Range { from: '\u{103c4}', to: '\u{103c7}', mapping: Disallowed }, + Range { from: '\u{103c8}', to: '\u{103d5}', mapping: Valid }, + Range { from: '\u{103d6}', to: '\u{103ff}', mapping: Disallowed }, + Range { from: '\u{10400}', to: '\u{10400}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10401}', to: '\u{10401}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10402}', to: '\u{10402}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10403}', to: '\u{10403}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10404}', to: '\u{10404}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10405}', to: '\u{10405}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10406}', to: '\u{10406}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10407}', to: '\u{10407}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 176, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10408}', to: '\u{10408}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10409}', to: '\u{10409}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{1040a}', to: '\u{1040a}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{1040b}', to: '\u{1040b}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{1040c}', to: '\u{1040c}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{1040d}', to: '\u{1040d}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{1040e}', to: '\u{1040e}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{1040f}', to: '\u{1040f}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10410}', to: '\u{10410}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10411}', to: '\u{10411}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10412}', to: '\u{10412}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10413}', to: '\u{10413}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10414}', to: '\u{10414}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10415}', to: '\u{10415}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10416}', to: '\u{10416}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10417}', to: '\u{10417}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10418}', to: '\u{10418}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{10419}', to: '\u{10419}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{1041a}', to: '\u{1041a}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 38, byte_len: 4 }) }, + Range { from: '\u{1041b}', to: '\u{1041b}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{1041c}', to: '\u{1041c}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{1041d}', to: '\u{1041d}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{1041e}', to: '\u{1041e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{1041f}', to: '\u{1041f}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10420}', to: '\u{10420}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10421}', to: '\u{10421}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10422}', to: '\u{10422}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10423}', to: '\u{10423}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10424}', to: '\u{10424}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10425}', to: '\u{10425}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10426}', to: '\u{10426}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10427}', to: '\u{10427}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10428}', to: '\u{1049d}', mapping: Valid }, + Range { from: '\u{1049e}', to: '\u{1049f}', mapping: Disallowed }, + Range { from: '\u{104a0}', to: '\u{104a9}', mapping: Valid }, + Range { from: '\u{104aa}', to: '\u{104af}', mapping: Disallowed }, + Range { from: '\u{104b0}', to: '\u{104b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b1}', to: '\u{104b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b2}', to: '\u{104b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b3}', to: '\u{104b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b4}', to: '\u{104b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b5}', to: '\u{104b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b6}', to: '\u{104b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b7}', to: '\u{104b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b8}', to: '\u{104b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104b9}', to: '\u{104b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104ba}', to: '\u{104ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104bb}', to: '\u{104bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104bc}', to: '\u{104bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104bd}', to: '\u{104bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104be}', to: '\u{104be}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104bf}', to: '\u{104bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c0}', to: '\u{104c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c1}', to: '\u{104c1}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c2}', to: '\u{104c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c3}', to: '\u{104c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c4}', to: '\u{104c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c5}', to: '\u{104c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c6}', to: '\u{104c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c7}', to: '\u{104c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c8}', to: '\u{104c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104c9}', to: '\u{104c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104ca}', to: '\u{104ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104cb}', to: '\u{104cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104cc}', to: '\u{104cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104cd}', to: '\u{104cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104ce}', to: '\u{104ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104cf}', to: '\u{104cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104d0}', to: '\u{104d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104d1}', to: '\u{104d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104d2}', to: '\u{104d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104d3}', to: '\u{104d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{104d4}', to: '\u{104d7}', mapping: Disallowed }, + Range { from: '\u{104d8}', to: '\u{104fb}', mapping: Valid }, + Range { from: '\u{104fc}', to: '\u{104ff}', mapping: Disallowed }, + Range { from: '\u{10500}', to: '\u{10527}', mapping: Valid }, + Range { from: '\u{10528}', to: '\u{1052f}', mapping: Disallowed }, + Range { from: '\u{10530}', to: '\u{10563}', mapping: Valid }, + Range { from: '\u{10564}', to: '\u{1056e}', mapping: Disallowed }, + Range { from: '\u{1056f}', to: '\u{1056f}', mapping: Valid }, + Range { from: '\u{10570}', to: '\u{105ff}', mapping: Disallowed }, + Range { from: '\u{10600}', to: '\u{10736}', mapping: Valid }, + Range { from: '\u{10737}', to: '\u{1073f}', mapping: Disallowed }, + Range { from: '\u{10740}', to: '\u{10755}', mapping: Valid }, + Range { from: '\u{10756}', to: '\u{1075f}', mapping: Disallowed }, + Range { from: '\u{10760}', to: '\u{10767}', mapping: Valid }, + Range { from: '\u{10768}', to: '\u{107ff}', 
mapping: Disallowed }, + Range { from: '\u{10800}', to: '\u{10805}', mapping: Valid }, + Range { from: '\u{10806}', to: '\u{10807}', mapping: Disallowed }, + Range { from: '\u{10808}', to: '\u{10808}', mapping: Valid }, + Range { from: '\u{10809}', to: '\u{10809}', mapping: Disallowed }, + Range { from: '\u{1080a}', to: '\u{10835}', mapping: Valid }, + Range { from: '\u{10836}', to: '\u{10836}', mapping: Disallowed }, + Range { from: '\u{10837}', to: '\u{10838}', mapping: Valid }, + Range { from: '\u{10839}', to: '\u{1083b}', mapping: Disallowed }, + Range { from: '\u{1083c}', to: '\u{1083c}', mapping: Valid }, + Range { from: '\u{1083d}', to: '\u{1083e}', mapping: Disallowed }, + Range { from: '\u{1083f}', to: '\u{10855}', mapping: Valid }, + Range { from: '\u{10856}', to: '\u{10856}', mapping: Disallowed }, + Range { from: '\u{10857}', to: '\u{1089e}', mapping: Valid }, + Range { from: '\u{1089f}', to: '\u{108a6}', mapping: Disallowed }, + Range { from: '\u{108a7}', to: '\u{108af}', mapping: Valid }, + Range { from: '\u{108b0}', to: '\u{108df}', mapping: Disallowed }, + Range { from: '\u{108e0}', to: '\u{108f2}', mapping: Valid }, + Range { from: '\u{108f3}', to: '\u{108f3}', mapping: Disallowed }, + Range { from: '\u{108f4}', to: '\u{108f5}', mapping: Valid }, + Range { from: '\u{108f6}', to: '\u{108fa}', mapping: Disallowed }, + Range { from: '\u{108fb}', to: '\u{1091b}', mapping: Valid }, + Range { from: '\u{1091c}', to: '\u{1091e}', mapping: Disallowed }, + Range { from: '\u{1091f}', to: '\u{10939}', mapping: Valid }, + Range { from: '\u{1093a}', to: '\u{1093e}', mapping: Disallowed }, + Range { from: '\u{1093f}', to: '\u{1093f}', mapping: Valid }, + Range { from: '\u{10940}', to: '\u{1097f}', mapping: Disallowed }, + Range { from: '\u{10980}', to: '\u{109b7}', mapping: Valid }, + Range { from: '\u{109b8}', to: '\u{109bb}', mapping: Disallowed }, + Range { from: '\u{109bc}', to: '\u{109cf}', mapping: Valid }, + Range { from: '\u{109d0}', to: '\u{109d1}', mapping: Disallowed }, + Range { from: '\u{109d2}', to: '\u{10a03}', mapping: Valid }, + Range { from: '\u{10a04}', to: '\u{10a04}', mapping: Disallowed }, + Range { from: '\u{10a05}', to: '\u{10a06}', mapping: Valid }, + Range { from: '\u{10a07}', to: '\u{10a0b}', mapping: Disallowed }, + Range { from: '\u{10a0c}', to: '\u{10a13}', mapping: Valid }, + Range { from: '\u{10a14}', to: '\u{10a14}', mapping: Disallowed }, + Range { from: '\u{10a15}', to: '\u{10a17}', mapping: Valid }, + Range { from: '\u{10a18}', to: '\u{10a18}', mapping: Disallowed }, + Range { from: '\u{10a19}', to: '\u{10a33}', mapping: Valid }, + Range { from: '\u{10a34}', to: '\u{10a37}', mapping: Disallowed }, + Range { from: '\u{10a38}', to: '\u{10a3a}', mapping: Valid }, + Range { from: '\u{10a3b}', to: '\u{10a3e}', mapping: Disallowed }, + Range { from: '\u{10a3f}', to: '\u{10a47}', mapping: Valid }, + Range { from: '\u{10a48}', to: '\u{10a4f}', mapping: Disallowed }, + Range { from: '\u{10a50}', to: '\u{10a58}', mapping: Valid }, + Range { from: '\u{10a59}', to: '\u{10a5f}', mapping: Disallowed }, + Range { from: '\u{10a60}', to: '\u{10a9f}', mapping: Valid }, + Range { from: '\u{10aa0}', to: '\u{10abf}', mapping: Disallowed }, + Range { from: '\u{10ac0}', to: '\u{10ae6}', mapping: Valid }, + Range { from: '\u{10ae7}', to: '\u{10aea}', mapping: Disallowed }, + Range { from: '\u{10aeb}', to: '\u{10af6}', mapping: Valid }, + Range { from: '\u{10af7}', to: '\u{10aff}', mapping: Disallowed }, + Range { from: '\u{10b00}', to: '\u{10b35}', mapping: Valid }, + Range { 
from: '\u{10b36}', to: '\u{10b38}', mapping: Disallowed }, + Range { from: '\u{10b39}', to: '\u{10b55}', mapping: Valid }, + Range { from: '\u{10b56}', to: '\u{10b57}', mapping: Disallowed }, + Range { from: '\u{10b58}', to: '\u{10b72}', mapping: Valid }, + Range { from: '\u{10b73}', to: '\u{10b77}', mapping: Disallowed }, + Range { from: '\u{10b78}', to: '\u{10b91}', mapping: Valid }, + Range { from: '\u{10b92}', to: '\u{10b98}', mapping: Disallowed }, + Range { from: '\u{10b99}', to: '\u{10b9c}', mapping: Valid }, + Range { from: '\u{10b9d}', to: '\u{10ba8}', mapping: Disallowed }, + Range { from: '\u{10ba9}', to: '\u{10baf}', mapping: Valid }, + Range { from: '\u{10bb0}', to: '\u{10bff}', mapping: Disallowed }, + Range { from: '\u{10c00}', to: '\u{10c48}', mapping: Valid }, + Range { from: '\u{10c49}', to: '\u{10c7f}', mapping: Disallowed }, + Range { from: '\u{10c80}', to: '\u{10c80}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c81}', to: '\u{10c81}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c82}', to: '\u{10c82}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c83}', to: '\u{10c83}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c84}', to: '\u{10c84}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c85}', to: '\u{10c85}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c86}', to: '\u{10c86}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c87}', to: '\u{10c87}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c88}', to: '\u{10c88}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c89}', to: '\u{10c89}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c8a}', to: '\u{10c8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c8b}', to: '\u{10c8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c8c}', to: '\u{10c8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c8d}', to: '\u{10c8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c8e}', to: '\u{10c8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 39, byte_len: 4 }) }, + Range { from: '\u{10c8f}', to: '\u{10c8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c90}', to: '\u{10c90}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c91}', to: '\u{10c91}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c92}', to: '\u{10c92}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c93}', to: '\u{10c93}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 16, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c94}', to: '\u{10c94}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c95}', to: '\u{10c95}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c96}', to: '\u{10c96}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c97}', to: '\u{10c97}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c98}', to: '\u{10c98}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c99}', to: '\u{10c99}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c9a}', to: '\u{10c9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c9b}', to: '\u{10c9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c9c}', to: '\u{10c9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c9d}', to: '\u{10c9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c9e}', to: '\u{10c9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10c9f}', to: '\u{10c9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca0}', to: '\u{10ca0}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca1}', to: '\u{10ca1}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca2}', to: '\u{10ca2}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca3}', to: '\u{10ca3}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca4}', to: '\u{10ca4}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca5}', to: '\u{10ca5}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca6}', to: '\u{10ca6}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca7}', to: '\u{10ca7}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca8}', to: '\u{10ca8}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10ca9}', to: '\u{10ca9}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10caa}', to: '\u{10caa}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10cab}', to: '\u{10cab}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10cac}', to: '\u{10cac}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10cad}', to: '\u{10cad}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10cae}', to: '\u{10cae}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10caf}', to: '\u{10caf}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10cb0}', to: '\u{10cb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10cb1}', to: '\u{10cb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10cb2}', to: '\u{10cb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{10cb3}', to: '\u{10cbf}', mapping: Disallowed }, + Range { from: '\u{10cc0}', to: '\u{10cf2}', mapping: Valid }, + Range { from: '\u{10cf3}', to: '\u{10cf9}', mapping: Disallowed }, + Range { from: '\u{10cfa}', to: '\u{10cff}', mapping: Valid }, + Range { from: '\u{10d00}', to: '\u{10e5f}', mapping: Disallowed }, + Range { from: '\u{10e60}', to: '\u{10e7e}', mapping: Valid }, + Range { from: '\u{10e7f}', to: '\u{10fff}', mapping: Disallowed }, + Range { from: '\u{11000}', to: '\u{1104d}', mapping: Valid }, + Range { from: '\u{1104e}', to: '\u{11051}', mapping: Disallowed }, + Range { from: '\u{11052}', to: '\u{1106f}', mapping: Valid }, + Range { from: '\u{11070}', to: '\u{1107e}', mapping: Disallowed }, + Range { from: '\u{1107f}', to: '\u{110bc}', mapping: Valid }, + Range { from: '\u{110bd}', to: '\u{110bd}', mapping: Disallowed }, + Range { from: '\u{110be}', to: '\u{110c1}', mapping: Valid }, + Range { from: '\u{110c2}', to: '\u{110cf}', mapping: Disallowed }, + Range { from: '\u{110d0}', to: '\u{110e8}', mapping: Valid }, + Range { from: '\u{110e9}', to: '\u{110ef}', mapping: Disallowed }, + Range { from: '\u{110f0}', to: '\u{110f9}', mapping: Valid }, + Range { from: '\u{110fa}', to: '\u{110ff}', mapping: Disallowed }, + Range { from: '\u{11100}', to: '\u{11134}', mapping: Valid }, + Range { from: '\u{11135}', to: '\u{11135}', mapping: Disallowed }, + Range { from: '\u{11136}', to: '\u{11143}', mapping: Valid }, + Range { from: '\u{11144}', to: '\u{1114f}', mapping: Disallowed }, + Range { from: '\u{11150}', to: '\u{11176}', mapping: Valid }, + Range { from: '\u{11177}', to: '\u{1117f}', mapping: Disallowed }, + Range { from: '\u{11180}', to: '\u{111cd}', mapping: Valid }, + Range { from: '\u{111ce}', to: '\u{111cf}', mapping: Disallowed }, + Range { from: '\u{111d0}', to: '\u{111df}', mapping: Valid }, + Range { from: '\u{111e0}', to: '\u{111e0}', mapping: Disallowed }, + Range { from: '\u{111e1}', to: '\u{111f4}', mapping: Valid }, + Range { from: '\u{111f5}', to: '\u{111ff}', mapping: Disallowed }, + Range { from: '\u{11200}', to: '\u{11211}', mapping: Valid }, + Range { from: '\u{11212}', to: '\u{11212}', mapping: Disallowed }, + Range { from: '\u{11213}', to: '\u{1123e}', mapping: Valid }, + Range { from: '\u{1123f}', to: '\u{1127f}', mapping: Disallowed }, + Range { from: '\u{11280}', to: '\u{11286}', mapping: Valid }, + Range { from: '\u{11287}', to: '\u{11287}', mapping: Disallowed }, + Range { from: '\u{11288}', to: '\u{11288}', mapping: Valid }, + Range { from: '\u{11289}', to: '\u{11289}', mapping: Disallowed }, + Range { from: '\u{1128a}', to: '\u{1128d}', mapping: Valid }, + Range { from: '\u{1128e}', to: '\u{1128e}', mapping: Disallowed }, + Range { from: '\u{1128f}', to: '\u{1129d}', 
mapping: Valid }, + Range { from: '\u{1129e}', to: '\u{1129e}', mapping: Disallowed }, + Range { from: '\u{1129f}', to: '\u{112a9}', mapping: Valid }, + Range { from: '\u{112aa}', to: '\u{112af}', mapping: Disallowed }, + Range { from: '\u{112b0}', to: '\u{112ea}', mapping: Valid }, + Range { from: '\u{112eb}', to: '\u{112ef}', mapping: Disallowed }, + Range { from: '\u{112f0}', to: '\u{112f9}', mapping: Valid }, + Range { from: '\u{112fa}', to: '\u{112ff}', mapping: Disallowed }, + Range { from: '\u{11300}', to: '\u{11303}', mapping: Valid }, + Range { from: '\u{11304}', to: '\u{11304}', mapping: Disallowed }, + Range { from: '\u{11305}', to: '\u{1130c}', mapping: Valid }, + Range { from: '\u{1130d}', to: '\u{1130e}', mapping: Disallowed }, + Range { from: '\u{1130f}', to: '\u{11310}', mapping: Valid }, + Range { from: '\u{11311}', to: '\u{11312}', mapping: Disallowed }, + Range { from: '\u{11313}', to: '\u{11328}', mapping: Valid }, + Range { from: '\u{11329}', to: '\u{11329}', mapping: Disallowed }, + Range { from: '\u{1132a}', to: '\u{11330}', mapping: Valid }, + Range { from: '\u{11331}', to: '\u{11331}', mapping: Disallowed }, + Range { from: '\u{11332}', to: '\u{11333}', mapping: Valid }, + Range { from: '\u{11334}', to: '\u{11334}', mapping: Disallowed }, + Range { from: '\u{11335}', to: '\u{11339}', mapping: Valid }, + Range { from: '\u{1133a}', to: '\u{1133b}', mapping: Disallowed }, + Range { from: '\u{1133c}', to: '\u{11344}', mapping: Valid }, + Range { from: '\u{11345}', to: '\u{11346}', mapping: Disallowed }, + Range { from: '\u{11347}', to: '\u{11348}', mapping: Valid }, + Range { from: '\u{11349}', to: '\u{1134a}', mapping: Disallowed }, + Range { from: '\u{1134b}', to: '\u{1134d}', mapping: Valid }, + Range { from: '\u{1134e}', to: '\u{1134f}', mapping: Disallowed }, + Range { from: '\u{11350}', to: '\u{11350}', mapping: Valid }, + Range { from: '\u{11351}', to: '\u{11356}', mapping: Disallowed }, + Range { from: '\u{11357}', to: '\u{11357}', mapping: Valid }, + Range { from: '\u{11358}', to: '\u{1135c}', mapping: Disallowed }, + Range { from: '\u{1135d}', to: '\u{11363}', mapping: Valid }, + Range { from: '\u{11364}', to: '\u{11365}', mapping: Disallowed }, + Range { from: '\u{11366}', to: '\u{1136c}', mapping: Valid }, + Range { from: '\u{1136d}', to: '\u{1136f}', mapping: Disallowed }, + Range { from: '\u{11370}', to: '\u{11374}', mapping: Valid }, + Range { from: '\u{11375}', to: '\u{113ff}', mapping: Disallowed }, + Range { from: '\u{11400}', to: '\u{11459}', mapping: Valid }, + Range { from: '\u{1145a}', to: '\u{1145a}', mapping: Disallowed }, + Range { from: '\u{1145b}', to: '\u{1145b}', mapping: Valid }, + Range { from: '\u{1145c}', to: '\u{1145c}', mapping: Disallowed }, + Range { from: '\u{1145d}', to: '\u{1145d}', mapping: Valid }, + Range { from: '\u{1145e}', to: '\u{1147f}', mapping: Disallowed }, + Range { from: '\u{11480}', to: '\u{114c7}', mapping: Valid }, + Range { from: '\u{114c8}', to: '\u{114cf}', mapping: Disallowed }, + Range { from: '\u{114d0}', to: '\u{114d9}', mapping: Valid }, + Range { from: '\u{114da}', to: '\u{1157f}', mapping: Disallowed }, + Range { from: '\u{11580}', to: '\u{115b5}', mapping: Valid }, + Range { from: '\u{115b6}', to: '\u{115b7}', mapping: Disallowed }, + Range { from: '\u{115b8}', to: '\u{115dd}', mapping: Valid }, + Range { from: '\u{115de}', to: '\u{115ff}', mapping: Disallowed }, + Range { from: '\u{11600}', to: '\u{11644}', mapping: Valid }, + Range { from: '\u{11645}', to: '\u{1164f}', mapping: Disallowed }, + Range { 
from: '\u{11650}', to: '\u{11659}', mapping: Valid }, + Range { from: '\u{1165a}', to: '\u{1165f}', mapping: Disallowed }, + Range { from: '\u{11660}', to: '\u{1166c}', mapping: Valid }, + Range { from: '\u{1166d}', to: '\u{1167f}', mapping: Disallowed }, + Range { from: '\u{11680}', to: '\u{116b7}', mapping: Valid }, + Range { from: '\u{116b8}', to: '\u{116bf}', mapping: Disallowed }, + Range { from: '\u{116c0}', to: '\u{116c9}', mapping: Valid }, + Range { from: '\u{116ca}', to: '\u{116ff}', mapping: Disallowed }, + Range { from: '\u{11700}', to: '\u{11719}', mapping: Valid }, + Range { from: '\u{1171a}', to: '\u{1171c}', mapping: Disallowed }, + Range { from: '\u{1171d}', to: '\u{1172b}', mapping: Valid }, + Range { from: '\u{1172c}', to: '\u{1172f}', mapping: Disallowed }, + Range { from: '\u{11730}', to: '\u{1173f}', mapping: Valid }, + Range { from: '\u{11740}', to: '\u{1189f}', mapping: Disallowed }, + Range { from: '\u{118a0}', to: '\u{118a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a1}', to: '\u{118a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a2}', to: '\u{118a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a3}', to: '\u{118a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a4}', to: '\u{118a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a5}', to: '\u{118a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a6}', to: '\u{118a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a7}', to: '\u{118a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a8}', to: '\u{118a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118a9}', to: '\u{118a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118aa}', to: '\u{118aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118ab}', to: '\u{118ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118ac}', to: '\u{118ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118ad}', to: '\u{118ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118ae}', to: '\u{118ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118af}', to: '\u{118af}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b0}', to: '\u{118b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b1}', to: '\u{118b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b2}', to: '\u{118b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 40, byte_len: 4 }) }, + Range { 
from: '\u{118b3}', to: '\u{118b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b4}', to: '\u{118b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b5}', to: '\u{118b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b6}', to: '\u{118b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b7}', to: '\u{118b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b8}', to: '\u{118b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118b9}', to: '\u{118b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118ba}', to: '\u{118ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118bb}', to: '\u{118bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 40, byte_len: 4 }) }, + Range { from: '\u{118bc}', to: '\u{118bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 41, byte_len: 4 }) }, + Range { from: '\u{118bd}', to: '\u{118bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 41, byte_len: 4 }) }, + Range { from: '\u{118be}', to: '\u{118be}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 41, byte_len: 4 }) }, + Range { from: '\u{118bf}', to: '\u{118bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 41, byte_len: 4 }) }, + Range { from: '\u{118c0}', to: '\u{118f2}', mapping: Valid }, + Range { from: '\u{118f3}', to: '\u{118fe}', mapping: Disallowed }, + Range { from: '\u{118ff}', to: '\u{118ff}', mapping: Valid }, + Range { from: '\u{11900}', to: '\u{11abf}', mapping: Disallowed }, + Range { from: '\u{11ac0}', to: '\u{11af8}', mapping: Valid }, + Range { from: '\u{11af9}', to: '\u{11bff}', mapping: Disallowed }, + Range { from: '\u{11c00}', to: '\u{11c08}', mapping: Valid }, + Range { from: '\u{11c09}', to: '\u{11c09}', mapping: Disallowed }, + Range { from: '\u{11c0a}', to: '\u{11c36}', mapping: Valid }, + Range { from: '\u{11c37}', to: '\u{11c37}', mapping: Disallowed }, + Range { from: '\u{11c38}', to: '\u{11c45}', mapping: Valid }, + Range { from: '\u{11c46}', to: '\u{11c4f}', mapping: Disallowed }, + Range { from: '\u{11c50}', to: '\u{11c6c}', mapping: Valid }, + Range { from: '\u{11c6d}', to: '\u{11c6f}', mapping: Disallowed }, + Range { from: '\u{11c70}', to: '\u{11c8f}', mapping: Valid }, + Range { from: '\u{11c90}', to: '\u{11c91}', mapping: Disallowed }, + Range { from: '\u{11c92}', to: '\u{11ca7}', mapping: Valid }, + Range { from: '\u{11ca8}', to: '\u{11ca8}', mapping: Disallowed }, + Range { from: '\u{11ca9}', to: '\u{11cb6}', mapping: Valid }, + Range { from: '\u{11cb7}', to: '\u{11fff}', mapping: Disallowed }, + Range { from: '\u{12000}', to: '\u{12399}', mapping: Valid }, + Range { from: '\u{1239a}', to: '\u{123ff}', mapping: Disallowed }, + Range { from: '\u{12400}', to: '\u{1246e}', mapping: Valid }, + Range { from: '\u{1246f}', to: '\u{1246f}', mapping: Disallowed }, + Range { from: '\u{12470}', to: '\u{12474}', mapping: Valid }, + Range { from: '\u{12475}', to: '\u{1247f}', mapping: Disallowed }, + Range { from: '\u{12480}', to: '\u{12543}', 
mapping: Valid }, + Range { from: '\u{12544}', to: '\u{12fff}', mapping: Disallowed }, + Range { from: '\u{13000}', to: '\u{1342e}', mapping: Valid }, + Range { from: '\u{1342f}', to: '\u{143ff}', mapping: Disallowed }, + Range { from: '\u{14400}', to: '\u{14646}', mapping: Valid }, + Range { from: '\u{14647}', to: '\u{167ff}', mapping: Disallowed }, + Range { from: '\u{16800}', to: '\u{16a38}', mapping: Valid }, + Range { from: '\u{16a39}', to: '\u{16a3f}', mapping: Disallowed }, + Range { from: '\u{16a40}', to: '\u{16a5e}', mapping: Valid }, + Range { from: '\u{16a5f}', to: '\u{16a5f}', mapping: Disallowed }, + Range { from: '\u{16a60}', to: '\u{16a69}', mapping: Valid }, + Range { from: '\u{16a6a}', to: '\u{16a6d}', mapping: Disallowed }, + Range { from: '\u{16a6e}', to: '\u{16a6f}', mapping: Valid }, + Range { from: '\u{16a70}', to: '\u{16acf}', mapping: Disallowed }, + Range { from: '\u{16ad0}', to: '\u{16aed}', mapping: Valid }, + Range { from: '\u{16aee}', to: '\u{16aef}', mapping: Disallowed }, + Range { from: '\u{16af0}', to: '\u{16af5}', mapping: Valid }, + Range { from: '\u{16af6}', to: '\u{16aff}', mapping: Disallowed }, + Range { from: '\u{16b00}', to: '\u{16b45}', mapping: Valid }, + Range { from: '\u{16b46}', to: '\u{16b4f}', mapping: Disallowed }, + Range { from: '\u{16b50}', to: '\u{16b59}', mapping: Valid }, + Range { from: '\u{16b5a}', to: '\u{16b5a}', mapping: Disallowed }, + Range { from: '\u{16b5b}', to: '\u{16b61}', mapping: Valid }, + Range { from: '\u{16b62}', to: '\u{16b62}', mapping: Disallowed }, + Range { from: '\u{16b63}', to: '\u{16b77}', mapping: Valid }, + Range { from: '\u{16b78}', to: '\u{16b7c}', mapping: Disallowed }, + Range { from: '\u{16b7d}', to: '\u{16b8f}', mapping: Valid }, + Range { from: '\u{16b90}', to: '\u{16eff}', mapping: Disallowed }, + Range { from: '\u{16f00}', to: '\u{16f44}', mapping: Valid }, + Range { from: '\u{16f45}', to: '\u{16f4f}', mapping: Disallowed }, + Range { from: '\u{16f50}', to: '\u{16f7e}', mapping: Valid }, + Range { from: '\u{16f7f}', to: '\u{16f8e}', mapping: Disallowed }, + Range { from: '\u{16f8f}', to: '\u{16f9f}', mapping: Valid }, + Range { from: '\u{16fa0}', to: '\u{16fdf}', mapping: Disallowed }, + Range { from: '\u{16fe0}', to: '\u{16fe0}', mapping: Valid }, + Range { from: '\u{16fe1}', to: '\u{16fff}', mapping: Disallowed }, + Range { from: '\u{17000}', to: '\u{187ec}', mapping: Valid }, + Range { from: '\u{187ed}', to: '\u{187ff}', mapping: Disallowed }, + Range { from: '\u{18800}', to: '\u{18af2}', mapping: Valid }, + Range { from: '\u{18af3}', to: '\u{1afff}', mapping: Disallowed }, + Range { from: '\u{1b000}', to: '\u{1b001}', mapping: Valid }, + Range { from: '\u{1b002}', to: '\u{1bbff}', mapping: Disallowed }, + Range { from: '\u{1bc00}', to: '\u{1bc6a}', mapping: Valid }, + Range { from: '\u{1bc6b}', to: '\u{1bc6f}', mapping: Disallowed }, + Range { from: '\u{1bc70}', to: '\u{1bc7c}', mapping: Valid }, + Range { from: '\u{1bc7d}', to: '\u{1bc7f}', mapping: Disallowed }, + Range { from: '\u{1bc80}', to: '\u{1bc88}', mapping: Valid }, + Range { from: '\u{1bc89}', to: '\u{1bc8f}', mapping: Disallowed }, + Range { from: '\u{1bc90}', to: '\u{1bc99}', mapping: Valid }, + Range { from: '\u{1bc9a}', to: '\u{1bc9b}', mapping: Disallowed }, + Range { from: '\u{1bc9c}', to: '\u{1bc9f}', mapping: Valid }, + Range { from: '\u{1bca0}', to: '\u{1bca3}', mapping: Ignored }, + Range { from: '\u{1bca4}', to: '\u{1cfff}', mapping: Disallowed }, + Range { from: '\u{1d000}', to: '\u{1d0f5}', mapping: Valid }, + Range { 
from: '\u{1d0f6}', to: '\u{1d0ff}', mapping: Disallowed }, + Range { from: '\u{1d100}', to: '\u{1d126}', mapping: Valid }, + Range { from: '\u{1d127}', to: '\u{1d128}', mapping: Disallowed }, + Range { from: '\u{1d129}', to: '\u{1d15d}', mapping: Valid }, + Range { from: '\u{1d15e}', to: '\u{1d15e}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 41, byte_len: 8 }) }, + Range { from: '\u{1d15f}', to: '\u{1d15f}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 41, byte_len: 8 }) }, + Range { from: '\u{1d160}', to: '\u{1d160}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d161}', to: '\u{1d161}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d162}', to: '\u{1d162}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d163}', to: '\u{1d163}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d164}', to: '\u{1d164}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d165}', to: '\u{1d172}', mapping: Valid }, + Range { from: '\u{1d173}', to: '\u{1d17a}', mapping: Disallowed }, + Range { from: '\u{1d17b}', to: '\u{1d1ba}', mapping: Valid }, + Range { from: '\u{1d1bb}', to: '\u{1d1bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 41, byte_len: 8 }) }, + Range { from: '\u{1d1bc}', to: '\u{1d1bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 41, byte_len: 8 }) }, + Range { from: '\u{1d1bd}', to: '\u{1d1bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d1be}', to: '\u{1d1be}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d1bf}', to: '\u{1d1bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d1c0}', to: '\u{1d1c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 41, byte_len: 12 }) }, + Range { from: '\u{1d1c1}', to: '\u{1d1e8}', mapping: Valid }, + Range { from: '\u{1d1e9}', to: '\u{1d1ff}', mapping: Disallowed }, + Range { from: '\u{1d200}', to: '\u{1d245}', mapping: Valid }, + Range { from: '\u{1d246}', to: '\u{1d2ff}', mapping: Disallowed }, + Range { from: '\u{1d300}', to: '\u{1d356}', mapping: Valid }, + Range { from: '\u{1d357}', to: '\u{1d35f}', mapping: Disallowed }, + Range { from: '\u{1d360}', to: '\u{1d371}', mapping: Valid }, + Range { from: '\u{1d372}', to: '\u{1d3ff}', mapping: Disallowed }, + Range { from: '\u{1d400}', to: '\u{1d400}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d401}', to: '\u{1d401}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d402}', to: '\u{1d402}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d403}', to: '\u{1d403}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d404}', to: '\u{1d404}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d405}', to: '\u{1d405}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, 
byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d406}', to: '\u{1d406}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d407}', to: '\u{1d407}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d408}', to: '\u{1d408}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d409}', to: '\u{1d409}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d40a}', to: '\u{1d40a}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d40b}', to: '\u{1d40b}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d40c}', to: '\u{1d40c}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d40d}', to: '\u{1d40d}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d40e}', to: '\u{1d40e}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d40f}', to: '\u{1d40f}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d410}', to: '\u{1d410}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d411}', to: '\u{1d411}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d412}', to: '\u{1d412}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d413}', to: '\u{1d413}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d414}', to: '\u{1d414}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d415}', to: '\u{1d415}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d416}', to: '\u{1d416}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d417}', to: '\u{1d417}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d418}', to: '\u{1d418}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d419}', to: '\u{1d419}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d41a}', to: '\u{1d41a}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d41b}', to: '\u{1d41b}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d41c}', to: '\u{1d41c}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d41d}', to: '\u{1d41d}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d41e}', to: '\u{1d41e}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d41f}', to: '\u{1d41f}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 
1 }) },
+ Range { from: '\u{1d420}', to: '\u{1d420}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d421}', to: '\u{1d421}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d422}', to: '\u{1d422}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d423}', to: '\u{1d423}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d424}', to: '\u{1d424}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d425}', to: '\u{1d425}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d426}', to: '\u{1d426}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d427}', to: '\u{1d427}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d428}', to: '\u{1d428}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d429}', to: '\u{1d429}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d42a}', to: '\u{1d42a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d42b}', to: '\u{1d42b}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d42c}', to: '\u{1d42c}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d42d}', to: '\u{1d42d}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d42e}', to: '\u{1d42e}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d42f}', to: '\u{1d42f}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d430}', to: '\u{1d430}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d431}', to: '\u{1d431}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d432}', to: '\u{1d432}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d433}', to: '\u{1d433}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d434}', to: '\u{1d434}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d435}', to: '\u{1d435}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d436}', to: '\u{1d436}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d437}', to: '\u{1d437}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d438}', to: '\u{1d438}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d439}', to: '\u{1d439}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d43a}', to: '\u{1d43a}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d43b}', to: '\u{1d43b}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d43c}', to: '\u{1d43c}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d43d}', to: '\u{1d43d}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d43e}', to: '\u{1d43e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d43f}', to: '\u{1d43f}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d440}', to: '\u{1d440}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d441}', to: '\u{1d441}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d442}', to: '\u{1d442}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d443}', to: '\u{1d443}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d444}', to: '\u{1d444}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d445}', to: '\u{1d445}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d446}', to: '\u{1d446}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d447}', to: '\u{1d447}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d448}', to: '\u{1d448}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d449}', to: '\u{1d449}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d44a}', to: '\u{1d44a}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d44b}', to: '\u{1d44b}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d44c}', to: '\u{1d44c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d44d}', to: '\u{1d44d}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d44e}', to: '\u{1d44e}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d44f}', to: '\u{1d44f}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d450}', to: '\u{1d450}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d451}', to: '\u{1d451}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d452}', to: '\u{1d452}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d453}', to: '\u{1d453}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d454}', to: '\u{1d454}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d455}', to: '\u{1d455}', mapping: Disallowed },
+ Range { from: '\u{1d456}', to: '\u{1d456}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d457}', to: '\u{1d457}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d458}', to: '\u{1d458}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d459}', to: '\u{1d459}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d45a}', to: '\u{1d45a}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d45b}', to: '\u{1d45b}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d45c}', to: '\u{1d45c}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d45d}', to: '\u{1d45d}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d45e}', to: '\u{1d45e}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d45f}', to: '\u{1d45f}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d460}', to: '\u{1d460}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d461}', to: '\u{1d461}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d462}', to: '\u{1d462}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d463}', to: '\u{1d463}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d464}', to: '\u{1d464}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d465}', to: '\u{1d465}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d466}', to: '\u{1d466}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d467}', to: '\u{1d467}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d468}', to: '\u{1d468}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d469}', to: '\u{1d469}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d46a}', to: '\u{1d46a}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d46b}', to: '\u{1d46b}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d46c}', to: '\u{1d46c}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d46d}', to: '\u{1d46d}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d46e}', to: '\u{1d46e}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d46f}', to: '\u{1d46f}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d470}', to: '\u{1d470}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d471}', to: '\u{1d471}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d472}', to: '\u{1d472}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d473}', to: '\u{1d473}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d474}', to: '\u{1d474}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d475}', to: '\u{1d475}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d476}', to: '\u{1d476}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d477}', to: '\u{1d477}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d478}', to: '\u{1d478}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d479}', to: '\u{1d479}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d47a}', to: '\u{1d47a}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d47b}', to: '\u{1d47b}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d47c}', to: '\u{1d47c}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d47d}', to: '\u{1d47d}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d47e}', to: '\u{1d47e}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d47f}', to: '\u{1d47f}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d480}', to: '\u{1d480}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d481}', to: '\u{1d481}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d482}', to: '\u{1d482}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d483}', to: '\u{1d483}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d484}', to: '\u{1d484}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d485}', to: '\u{1d485}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d486}', to: '\u{1d486}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d487}', to: '\u{1d487}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d488}', to: '\u{1d488}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d489}', to: '\u{1d489}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d48a}', to: '\u{1d48a}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d48b}', to: '\u{1d48b}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d48c}', to: '\u{1d48c}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d48d}', to: '\u{1d48d}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d48e}', to: '\u{1d48e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d48f}', to: '\u{1d48f}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d490}', to: '\u{1d490}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d491}', to: '\u{1d491}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d492}', to: '\u{1d492}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d493}', to: '\u{1d493}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d494}', to: '\u{1d494}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d495}', to: '\u{1d495}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d496}', to: '\u{1d496}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d497}', to: '\u{1d497}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d498}', to: '\u{1d498}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d499}', to: '\u{1d499}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d49a}', to: '\u{1d49a}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d49b}', to: '\u{1d49b}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d49c}', to: '\u{1d49c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d49d}', to: '\u{1d49d}', mapping: Disallowed },
+ Range { from: '\u{1d49e}', to: '\u{1d49e}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d49f}', to: '\u{1d49f}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4a0}', to: '\u{1d4a1}', mapping: Disallowed },
+ Range { from: '\u{1d4a2}', to: '\u{1d4a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4a3}', to: '\u{1d4a4}', mapping: Disallowed },
+ Range { from: '\u{1d4a5}', to: '\u{1d4a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4a6}', to: '\u{1d4a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4a7}', to: '\u{1d4a8}', mapping: Disallowed },
+ Range { from: '\u{1d4a9}', to: '\u{1d4a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4aa}', to: '\u{1d4aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ab}', to: '\u{1d4ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ac}', to: '\u{1d4ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ad}', to: '\u{1d4ad}', mapping: Disallowed },
+ Range { from: '\u{1d4ae}', to: '\u{1d4ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4af}', to: '\u{1d4af}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b0}', to: '\u{1d4b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b1}', to: '\u{1d4b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b2}', to: '\u{1d4b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b3}', to: '\u{1d4b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b4}', to: '\u{1d4b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b5}', to: '\u{1d4b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b6}', to: '\u{1d4b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b7}', to: '\u{1d4b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b8}', to: '\u{1d4b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4b9}', to: '\u{1d4b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ba}', to: '\u{1d4ba}', mapping: Disallowed },
+ Range { from: '\u{1d4bb}', to: '\u{1d4bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4bc}', to: '\u{1d4bc}', mapping: Disallowed },
+ Range { from: '\u{1d4bd}', to: '\u{1d4bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4be}', to: '\u{1d4be}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4bf}', to: '\u{1d4bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c0}', to: '\u{1d4c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c1}', to: '\u{1d4c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c2}', to: '\u{1d4c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c3}', to: '\u{1d4c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c4}', to: '\u{1d4c4}', mapping: Disallowed },
+ Range { from: '\u{1d4c5}', to: '\u{1d4c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c6}', to: '\u{1d4c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c7}', to: '\u{1d4c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c8}', to: '\u{1d4c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4c9}', to: '\u{1d4c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ca}', to: '\u{1d4ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4cb}', to: '\u{1d4cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4cc}', to: '\u{1d4cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4cd}', to: '\u{1d4cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ce}', to: '\u{1d4ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4cf}', to: '\u{1d4cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d0}', to: '\u{1d4d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d1}', to: '\u{1d4d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d2}', to: '\u{1d4d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d3}', to: '\u{1d4d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d4}', to: '\u{1d4d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d5}', to: '\u{1d4d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d6}', to: '\u{1d4d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d7}', to: '\u{1d4d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d8}', to: '\u{1d4d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4d9}', to: '\u{1d4d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4da}', to: '\u{1d4da}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4db}', to: '\u{1d4db}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4dc}', to: '\u{1d4dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4dd}', to: '\u{1d4dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4de}', to: '\u{1d4de}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4df}', to: '\u{1d4df}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e0}', to: '\u{1d4e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e1}', to: '\u{1d4e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e2}', to: '\u{1d4e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e3}', to: '\u{1d4e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e4}', to: '\u{1d4e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e5}', to: '\u{1d4e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e6}', to: '\u{1d4e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e7}', to: '\u{1d4e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e8}', to: '\u{1d4e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4e9}', to: '\u{1d4e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ea}', to: '\u{1d4ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4eb}', to: '\u{1d4eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ec}', to: '\u{1d4ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ed}', to: '\u{1d4ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ee}', to: '\u{1d4ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ef}', to: '\u{1d4ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f0}', to: '\u{1d4f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f1}', to: '\u{1d4f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f2}', to: '\u{1d4f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f3}', to: '\u{1d4f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f4}', to: '\u{1d4f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f5}', to: '\u{1d4f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f6}', to: '\u{1d4f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f7}', to: '\u{1d4f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f8}', to: '\u{1d4f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4f9}', to: '\u{1d4f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4fa}', to: '\u{1d4fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4fb}', to: '\u{1d4fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4fc}', to: '\u{1d4fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4fd}', to: '\u{1d4fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4fe}', to: '\u{1d4fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d4ff}', to: '\u{1d4ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d500}', to: '\u{1d500}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d501}', to: '\u{1d501}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d502}', to: '\u{1d502}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d503}', to: '\u{1d503}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d504}', to: '\u{1d504}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d505}', to: '\u{1d505}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d506}', to: '\u{1d506}', mapping: Disallowed },
+ Range { from: '\u{1d507}', to: '\u{1d507}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d508}', to: '\u{1d508}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d509}', to: '\u{1d509}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d50a}', to: '\u{1d50a}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d50b}', to: '\u{1d50c}', mapping: Disallowed },
+ Range { from: '\u{1d50d}', to: '\u{1d50d}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d50e}', to: '\u{1d50e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d50f}', to: '\u{1d50f}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d510}', to: '\u{1d510}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d511}', to: '\u{1d511}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d512}', to: '\u{1d512}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d513}', to: '\u{1d513}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d514}', to: '\u{1d514}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d515}', to: '\u{1d515}', mapping: Disallowed },
+ Range { from: '\u{1d516}', to: '\u{1d516}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d517}', to: '\u{1d517}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d518}', to: '\u{1d518}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d519}', to: '\u{1d519}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d51a}', to: '\u{1d51a}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d51b}', to: '\u{1d51b}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d51c}', to: '\u{1d51c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d51d}', to: '\u{1d51d}', mapping: Disallowed },
+ Range { from: '\u{1d51e}', to: '\u{1d51e}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d51f}', to: '\u{1d51f}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d520}', to: '\u{1d520}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d521}', to: '\u{1d521}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d522}', to: '\u{1d522}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d523}', to: '\u{1d523}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d524}', to: '\u{1d524}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d525}', to: '\u{1d525}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d526}', to: '\u{1d526}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d527}', to: '\u{1d527}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d528}', to: '\u{1d528}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d529}', to: '\u{1d529}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d52a}', to: '\u{1d52a}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d52b}', to: '\u{1d52b}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d52c}', to: '\u{1d52c}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d52d}', to: '\u{1d52d}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d52e}', to: '\u{1d52e}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d52f}', to: '\u{1d52f}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d530}', to: '\u{1d530}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d531}', to: '\u{1d531}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d532}', to: '\u{1d532}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d533}', to: '\u{1d533}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d534}', to: '\u{1d534}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d535}', to: '\u{1d535}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d536}', to: '\u{1d536}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d537}', to: '\u{1d537}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d538}', to: '\u{1d538}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d539}', to: '\u{1d539}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d53a}', to: '\u{1d53a}', mapping: Disallowed },
+ Range { from: '\u{1d53b}', to: '\u{1d53b}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d53c}', to: '\u{1d53c}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d53d}', to: '\u{1d53d}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d53e}', to: '\u{1d53e}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d53f}', to: '\u{1d53f}', mapping: Disallowed },
+ Range { from: '\u{1d540}', to: '\u{1d540}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d541}', to: '\u{1d541}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d542}', to: '\u{1d542}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d543}', to: '\u{1d543}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d544}', to: '\u{1d544}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d545}', to: '\u{1d545}', mapping: Disallowed },
+ Range { from: '\u{1d546}', to: '\u{1d546}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d547}', to: '\u{1d549}', mapping: Disallowed },
+ Range { from: '\u{1d54a}', to: '\u{1d54a}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d54b}', to: '\u{1d54b}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d54c}', to: '\u{1d54c}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d54d}', to: '\u{1d54d}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d54e}', to: '\u{1d54e}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d54f}', to: '\u{1d54f}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d550}', to: '\u{1d550}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d551}', to: '\u{1d551}', mapping: Disallowed },
+ Range { from: '\u{1d552}', to: '\u{1d552}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d553}', to: '\u{1d553}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d554}', to: '\u{1d554}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d555}', to: '\u{1d555}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d556}', to: '\u{1d556}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d557}', to: '\u{1d557}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d558}', to: '\u{1d558}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d559}', to: '\u{1d559}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d55a}', to: '\u{1d55a}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d55b}', to: '\u{1d55b}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d55c}', to: '\u{1d55c}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d55d}', to: '\u{1d55d}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d55e}', to: '\u{1d55e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d55f}', to: '\u{1d55f}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d560}', to: '\u{1d560}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d561}', to: '\u{1d561}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d562}', to: '\u{1d562}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d563}', to: '\u{1d563}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d564}', to: '\u{1d564}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d565}', to: '\u{1d565}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d566}', to: '\u{1d566}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d567}', to: '\u{1d567}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d568}', to: '\u{1d568}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d569}', to: '\u{1d569}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d56a}', to: '\u{1d56a}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d56b}', to: '\u{1d56b}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d56c}', to: '\u{1d56c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d56d}', to: '\u{1d56d}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d56e}', to: '\u{1d56e}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d56f}', to: '\u{1d56f}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d570}', to: '\u{1d570}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d571}', to: '\u{1d571}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d572}', to: '\u{1d572}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d573}', to: '\u{1d573}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d574}', to: '\u{1d574}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d575}', to: '\u{1d575}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d576}', to: '\u{1d576}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d577}', to: '\u{1d577}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d578}', to: '\u{1d578}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d579}', to: '\u{1d579}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d57a}', to: '\u{1d57a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d57b}', to: '\u{1d57b}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d57c}', to: '\u{1d57c}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d57d}', to: '\u{1d57d}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d57e}', to: '\u{1d57e}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d57f}', to: '\u{1d57f}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d580}', to: '\u{1d580}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d581}', to: '\u{1d581}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d582}', to: '\u{1d582}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d583}', to: '\u{1d583}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d584}', to: '\u{1d584}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d585}', to: '\u{1d585}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d586}', to: '\u{1d586}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d587}', to: '\u{1d587}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d588}', to: '\u{1d588}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d589}', to: '\u{1d589}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d58a}', to: '\u{1d58a}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d58b}', to: '\u{1d58b}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d58c}', to: '\u{1d58c}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d58d}', to: '\u{1d58d}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d58e}', to: '\u{1d58e}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d58f}', to: '\u{1d58f}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d590}', to: '\u{1d590}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d591}', to: '\u{1d591}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d592}', to: '\u{1d592}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d593}', to: '\u{1d593}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d594}', to: '\u{1d594}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d595}', to: '\u{1d595}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d596}', to: '\u{1d596}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d597}', to: '\u{1d597}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d598}', to: '\u{1d598}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d599}', to: '\u{1d599}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d59a}', to: '\u{1d59a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d59b}', to: '\u{1d59b}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d59c}', to: '\u{1d59c}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d59d}', to: '\u{1d59d}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d59e}', to: '\u{1d59e}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d59f}', to: '\u{1d59f}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a0}', to: '\u{1d5a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a1}', to: '\u{1d5a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a2}', to: '\u{1d5a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a3}', to: '\u{1d5a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a4}', to: '\u{1d5a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a5}', to: '\u{1d5a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a6}', to: '\u{1d5a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a7}', to: '\u{1d5a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a8}', to: '\u{1d5a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5a9}', to: '\u{1d5a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5aa}', to: '\u{1d5aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ab}', to: '\u{1d5ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ac}', to: '\u{1d5ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ad}', to: '\u{1d5ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ae}', to: '\u{1d5ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5af}', to: '\u{1d5af}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b0}', to: '\u{1d5b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b1}', to: '\u{1d5b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b2}', to: '\u{1d5b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b3}', to: '\u{1d5b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b4}', to: '\u{1d5b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b5}', to: '\u{1d5b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b6}', to: '\u{1d5b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b7}', to: '\u{1d5b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b8}', to: '\u{1d5b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5b9}', to: '\u{1d5b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ba}', to: '\u{1d5ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5bb}', to: '\u{1d5bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5bc}', to: '\u{1d5bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5bd}', to: '\u{1d5bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5be}', to: '\u{1d5be}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5bf}', to: '\u{1d5bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c0}', to: '\u{1d5c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c1}', to: '\u{1d5c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c2}', to: '\u{1d5c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c3}', to: '\u{1d5c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c4}', to: '\u{1d5c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c5}', to: '\u{1d5c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c6}', to: '\u{1d5c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c7}', to: '\u{1d5c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c8}', to: '\u{1d5c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5c9}', to: '\u{1d5c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ca}', to: '\u{1d5ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5cb}', to: '\u{1d5cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5cc}', to: '\u{1d5cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5cd}', to: '\u{1d5cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ce}', to: '\u{1d5ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5cf}', to: '\u{1d5cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d0}', to: '\u{1d5d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d1}', to: '\u{1d5d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d2}', to: '\u{1d5d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d3}', to: '\u{1d5d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d4}', to: '\u{1d5d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d5}', to: '\u{1d5d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d6}', to: '\u{1d5d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d7}', to: '\u{1d5d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d8}', to: '\u{1d5d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5d9}', to: '\u{1d5d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5da}', to: '\u{1d5da}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5db}', to: '\u{1d5db}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5dc}', to: '\u{1d5dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5dd}', to: '\u{1d5dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5de}', to: '\u{1d5de}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5df}', to: '\u{1d5df}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e0}', to: '\u{1d5e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e1}', to: '\u{1d5e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e2}', to: '\u{1d5e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e3}', to: '\u{1d5e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e4}', to: '\u{1d5e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e5}', to: '\u{1d5e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e6}', to: '\u{1d5e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e7}', to: '\u{1d5e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e8}', to: '\u{1d5e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5e9}', to: '\u{1d5e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ea}', to: '\u{1d5ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5eb}', to: '\u{1d5eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ec}', to: '\u{1d5ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ed}', to: '\u{1d5ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ee}', to: '\u{1d5ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ef}', to: '\u{1d5ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f0}', to: '\u{1d5f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f1}', to: '\u{1d5f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f2}', to: '\u{1d5f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f3}', to: '\u{1d5f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f4}', to: '\u{1d5f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f5}', to: '\u{1d5f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f6}', to: '\u{1d5f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f7}', to: '\u{1d5f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f8}', to: '\u{1d5f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5f9}', to: '\u{1d5f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5fa}', to: '\u{1d5fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5fb}', to: '\u{1d5fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5fc}', to: '\u{1d5fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5fd}', to: '\u{1d5fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5fe}', to: '\u{1d5fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d5ff}', to: '\u{1d5ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d600}', to: '\u{1d600}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d601}', to: '\u{1d601}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d602}', to: '\u{1d602}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d603}', to: '\u{1d603}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d604}', to: '\u{1d604}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d605}', to: '\u{1d605}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d606}', to: '\u{1d606}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d607}', to: '\u{1d607}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d608}', to: '\u{1d608}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d609}', to: '\u{1d609}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d60a}', to: '\u{1d60a}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d60b}', to: '\u{1d60b}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d60c}', to: '\u{1d60c}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d60d}', to: '\u{1d60d}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d60e}', to: '\u{1d60e}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d60f}', to: '\u{1d60f}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d610}', to: '\u{1d610}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d611}', to: '\u{1d611}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d612}', to: '\u{1d612}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d613}', to: '\u{1d613}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d614}', to: '\u{1d614}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d615}', to: '\u{1d615}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d616}', to: '\u{1d616}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d617}', to: '\u{1d617}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d618}', to: '\u{1d618}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d619}', to: '\u{1d619}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d61a}', to: '\u{1d61a}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d61b}', to: '\u{1d61b}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d61c}', to: '\u{1d61c}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d61d}', to: '\u{1d61d}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d61e}', to: '\u{1d61e}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d61f}', to: '\u{1d61f}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d620}', to: '\u{1d620}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d621}', to: '\u{1d621}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d622}', to: '\u{1d622}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d623}', to: '\u{1d623}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d624}', to: '\u{1d624}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d625}', to: '\u{1d625}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d626}', to: '\u{1d626}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d627}', to: '\u{1d627}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d628}', to: '\u{1d628}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d629}', to: '\u{1d629}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d62a}', to: '\u{1d62a}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d62b}', to: '\u{1d62b}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d62c}', to: '\u{1d62c}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d62d}', to: '\u{1d62d}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d62e}', to: '\u{1d62e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d62f}', to: '\u{1d62f}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d630}', to: '\u{1d630}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d631}', to: '\u{1d631}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d632}', to: '\u{1d632}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d633}', to: '\u{1d633}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d634}', to: '\u{1d634}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d635}', to: '\u{1d635}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d636}', to: '\u{1d636}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d637}', to: '\u{1d637}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d638}', to: '\u{1d638}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d639}', to: '\u{1d639}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d63a}', to: '\u{1d63a}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d63b}', to: '\u{1d63b}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d63c}', to: '\u{1d63c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d63d}', to: '\u{1d63d}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d63e}', to: '\u{1d63e}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d63f}', to: '\u{1d63f}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d640}', to: '\u{1d640}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d641}', to: '\u{1d641}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d642}', to: '\u{1d642}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d643}', to: '\u{1d643}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d644}', to: '\u{1d644}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d645}', to: '\u{1d645}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d646}', to: '\u{1d646}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d647}', to: '\u{1d647}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d648}', to: '\u{1d648}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d649}', to: '\u{1d649}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d64a}', to: '\u{1d64a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d64b}', to: '\u{1d64b}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d64c}', to: '\u{1d64c}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d64d}', to: '\u{1d64d}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d64e}', to: '\u{1d64e}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d64f}', to: '\u{1d64f}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d650}', to: '\u{1d650}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d651}', to: '\u{1d651}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d652}', to: '\u{1d652}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d653}', to: '\u{1d653}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+ Range { from: '\u{1d654}', to:
'\u{1d654}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d655}', to: '\u{1d655}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d656}', to: '\u{1d656}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d657}', to: '\u{1d657}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d658}', to: '\u{1d658}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d659}', to: '\u{1d659}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d65a}', to: '\u{1d65a}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d65b}', to: '\u{1d65b}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d65c}', to: '\u{1d65c}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d65d}', to: '\u{1d65d}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d65e}', to: '\u{1d65e}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d65f}', to: '\u{1d65f}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d660}', to: '\u{1d660}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d661}', to: '\u{1d661}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d662}', to: '\u{1d662}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d663}', to: '\u{1d663}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d664}', to: '\u{1d664}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d665}', to: '\u{1d665}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d666}', to: '\u{1d666}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d667}', to: '\u{1d667}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d668}', to: '\u{1d668}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d669}', to: '\u{1d669}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d66a}', to: '\u{1d66a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d66b}', to: '\u{1d66b}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d66c}', to: '\u{1d66c}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d66d}', to: '\u{1d66d}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d66e}', to: '\u{1d66e}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d66f}', to: '\u{1d66f}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d670}', to: '\u{1d670}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d671}', to: '\u{1d671}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d672}', to: '\u{1d672}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d673}', to: '\u{1d673}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d674}', to: '\u{1d674}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d675}', to: '\u{1d675}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d676}', to: '\u{1d676}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d677}', to: '\u{1d677}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d678}', to: '\u{1d678}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d679}', to: '\u{1d679}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d67a}', to: '\u{1d67a}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d67b}', to: '\u{1d67b}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d67c}', to: '\u{1d67c}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d67d}', to: '\u{1d67d}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d67e}', to: '\u{1d67e}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d67f}', to: '\u{1d67f}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d680}', to: '\u{1d680}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d681}', to: '\u{1d681}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d682}', to: '\u{1d682}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d683}', to: '\u{1d683}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d684}', to: '\u{1d684}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d685}', to: '\u{1d685}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d686}', to: '\u{1d686}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d687}', to: '\u{1d687}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d688}', to: '\u{1d688}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d689}', to: '\u{1d689}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d68a}', to: '\u{1d68a}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d68b}', to: '\u{1d68b}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d68c}', to: '\u{1d68c}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d68d}', to: '\u{1d68d}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d68e}', to: '\u{1d68e}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d68f}', to: '\u{1d68f}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d690}', to: '\u{1d690}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d691}', to: '\u{1d691}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d692}', to: '\u{1d692}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d693}', to: '\u{1d693}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d694}', to: '\u{1d694}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d695}', to: '\u{1d695}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d696}', to: '\u{1d696}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d697}', to: '\u{1d697}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d698}', to: '\u{1d698}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d699}', to: '\u{1d699}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d69a}', to: '\u{1d69a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d69b}', to: '\u{1d69b}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d69c}', to: '\u{1d69c}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d69d}', to: '\u{1d69d}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d69e}', to: '\u{1d69e}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d69f}', to: '\u{1d69f}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d6a0}', to: '\u{1d6a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d6a1}', to: '\u{1d6a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d6a2}', to: '\u{1d6a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, 
byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d6a3}', to: '\u{1d6a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, + Range { from: '\u{1d6a4}', to: '\u{1d6a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 41, byte_len: 2 }) }, + Range { from: '\u{1d6a5}', to: '\u{1d6a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 41, byte_len: 2 }) }, + Range { from: '\u{1d6a6}', to: '\u{1d6a7}', mapping: Disallowed }, + Range { from: '\u{1d6a8}', to: '\u{1d6a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6a9}', to: '\u{1d6a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6aa}', to: '\u{1d6aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ab}', to: '\u{1d6ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ac}', to: '\u{1d6ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ad}', to: '\u{1d6ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ae}', to: '\u{1d6ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6af}', to: '\u{1d6af}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b0}', to: '\u{1d6b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d6b1}', to: '\u{1d6b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b2}', to: '\u{1d6b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b3}', to: '\u{1d6b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d6b4}', to: '\u{1d6b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b5}', to: '\u{1d6b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b6}', to: '\u{1d6b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b7}', to: '\u{1d6b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b8}', to: '\u{1d6b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6b9}', to: '\u{1d6b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ba}', to: '\u{1d6ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6bb}', to: '\u{1d6bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6bc}', to: '\u{1d6bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6bd}', to: '\u{1d6bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6be}', to: 
'\u{1d6be}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6bf}', to: '\u{1d6bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c0}', to: '\u{1d6c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c1}', to: '\u{1d6c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d6c2}', to: '\u{1d6c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c3}', to: '\u{1d6c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c4}', to: '\u{1d6c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c5}', to: '\u{1d6c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c6}', to: '\u{1d6c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c7}', to: '\u{1d6c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c8}', to: '\u{1d6c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6c9}', to: '\u{1d6c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ca}', to: '\u{1d6ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d6cb}', to: '\u{1d6cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6cc}', to: '\u{1d6cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6cd}', to: '\u{1d6cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d6ce}', to: '\u{1d6ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6cf}', to: '\u{1d6cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d0}', to: '\u{1d6d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d1}', to: '\u{1d6d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d2}', to: '\u{1d6d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d3}', to: '\u{1d6d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d5}', to: '\u{1d6d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d6}', to: '\u{1d6d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d7}', to: '\u{1d6d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d8}', to: '\u{1d6d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6d9}', to: '\u{1d6d9}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6da}', to: '\u{1d6da}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6db}', to: '\u{1d6db}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d6dc}', to: '\u{1d6dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6dd}', to: '\u{1d6dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6de}', to: '\u{1d6de}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6df}', to: '\u{1d6df}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e0}', to: '\u{1d6e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e1}', to: '\u{1d6e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e2}', to: '\u{1d6e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e3}', to: '\u{1d6e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e4}', to: '\u{1d6e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e5}', to: '\u{1d6e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e6}', to: '\u{1d6e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e7}', to: '\u{1d6e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e8}', to: '\u{1d6e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6e9}', to: '\u{1d6e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ea}', to: '\u{1d6ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d6eb}', to: '\u{1d6eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ec}', to: '\u{1d6ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ed}', to: '\u{1d6ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d6ee}', to: '\u{1d6ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ef}', to: '\u{1d6ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f0}', to: '\u{1d6f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f1}', to: '\u{1d6f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f2}', to: '\u{1d6f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f3}', to: '\u{1d6f3}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f4}', to: '\u{1d6f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f5}', to: '\u{1d6f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f6}', to: '\u{1d6f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f7}', to: '\u{1d6f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f8}', to: '\u{1d6f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6f9}', to: '\u{1d6f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6fa}', to: '\u{1d6fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6fb}', to: '\u{1d6fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d6fc}', to: '\u{1d6fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6fd}', to: '\u{1d6fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6fe}', to: '\u{1d6fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d6ff}', to: '\u{1d6ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d700}', to: '\u{1d700}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d701}', to: '\u{1d701}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d702}', to: '\u{1d702}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d703}', to: '\u{1d703}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d704}', to: '\u{1d704}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d705}', to: '\u{1d705}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d706}', to: '\u{1d706}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d707}', to: '\u{1d707}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d708}', to: '\u{1d708}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d709}', to: '\u{1d709}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d70a}', to: '\u{1d70a}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d70b}', to: '\u{1d70b}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d70c}', to: '\u{1d70c}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d70d}', to: '\u{1d70e}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d70f}', to: '\u{1d70f}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d710}', to: '\u{1d710}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d711}', to: '\u{1d711}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d712}', to: '\u{1d712}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d713}', to: '\u{1d713}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d714}', to: '\u{1d714}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d715}', to: '\u{1d715}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d716}', to: '\u{1d716}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d717}', to: '\u{1d717}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d718}', to: '\u{1d718}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d719}', to: '\u{1d719}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d71a}', to: '\u{1d71a}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d71b}', to: '\u{1d71b}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d71c}', to: '\u{1d71c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d71d}', to: '\u{1d71d}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d71e}', to: '\u{1d71e}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d71f}', to: '\u{1d71f}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d720}', to: '\u{1d720}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d721}', to: '\u{1d721}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d722}', to: '\u{1d722}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d723}', to: '\u{1d723}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d724}', to: '\u{1d724}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d725}', to: '\u{1d725}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d726}', to: '\u{1d726}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d727}', to: '\u{1d727}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d728}', to: '\u{1d728}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d729}', to: '\u{1d729}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d72a}', to: '\u{1d72a}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d72b}', to: '\u{1d72b}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d72c}', to: '\u{1d72c}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d72d}', to: '\u{1d72d}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d72e}', to: '\u{1d72e}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d72f}', to: '\u{1d72f}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d730}', to: '\u{1d730}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d731}', to: '\u{1d731}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d732}', to: '\u{1d732}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d733}', to: '\u{1d733}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d734}', to: '\u{1d734}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d735}', to: '\u{1d735}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d736}', to: '\u{1d736}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d737}', to: '\u{1d737}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d738}', to: '\u{1d738}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d739}', to: '\u{1d739}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d73a}', to: '\u{1d73a}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d73b}', to: '\u{1d73b}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d73c}', to: '\u{1d73c}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d73d}', to: '\u{1d73d}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d73e}', to: '\u{1d73e}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d73f}', to: '\u{1d73f}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d740}', to: '\u{1d740}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d741}', to: '\u{1d741}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d742}', to: '\u{1d742}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d743}', to: '\u{1d743}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d744}', to: '\u{1d744}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d745}', to: '\u{1d745}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d746}', to: '\u{1d746}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d747}', to: '\u{1d748}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d749}', to: '\u{1d749}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d74a}', to: '\u{1d74a}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d74b}', to: '\u{1d74b}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d74c}', to: '\u{1d74c}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d74d}', to: '\u{1d74d}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d74e}', to: '\u{1d74e}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d74f}', to: '\u{1d74f}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d750}', to: '\u{1d750}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d751}', to: '\u{1d751}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d752}', to: '\u{1d752}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d753}', to: '\u{1d753}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d754}', to: '\u{1d754}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d755}', to: '\u{1d755}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d756}', to: '\u{1d756}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d757}', to: '\u{1d757}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d758}', to: '\u{1d758}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d759}', to: '\u{1d759}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d75a}', to: '\u{1d75a}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d75b}', to: '\u{1d75b}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d75c}', to: '\u{1d75c}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d75d}', to: '\u{1d75d}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d75e}', to: '\u{1d75e}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d75f}', to: '\u{1d75f}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d760}', to: '\u{1d760}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d761}', to: '\u{1d761}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d762}', to: '\u{1d762}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d763}', to: '\u{1d763}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d764}', to: '\u{1d764}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d765}', to: '\u{1d765}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d766}', to: '\u{1d766}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d767}', to: '\u{1d767}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d768}', to: '\u{1d768}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d769}', to: '\u{1d769}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d76a}', to: '\u{1d76a}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d76b}', to: '\u{1d76b}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d76c}', to: '\u{1d76c}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d76d}', to: '\u{1d76d}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d76e}', to: '\u{1d76e}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d76f}', to: '\u{1d76f}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d770}', to: '\u{1d770}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d771}', to: '\u{1d771}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d772}', to: '\u{1d772}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d773}', to: '\u{1d773}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d774}', to: '\u{1d774}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d775}', to: '\u{1d775}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d776}', to: '\u{1d776}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d777}', to: '\u{1d777}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d778}', to: '\u{1d778}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d779}', to: '\u{1d779}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d77a}', to: '\u{1d77a}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d77b}', to: '\u{1d77b}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d77c}', to: '\u{1d77c}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d77d}', to: '\u{1d77d}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d77e}', to: '\u{1d77e}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d77f}', to: '\u{1d77f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d780}', to: '\u{1d780}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d781}', to: '\u{1d782}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d783}', to: '\u{1d783}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d784}', to: '\u{1d784}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d785}', to: '\u{1d785}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d786}', to: '\u{1d786}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d787}', to: '\u{1d787}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d788}', to: '\u{1d788}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d789}', to: '\u{1d789}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d78a}', to: '\u{1d78a}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d78b}', to: '\u{1d78b}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d78c}', to: '\u{1d78c}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d78d}', to: '\u{1d78d}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d78e}', to: '\u{1d78e}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d78f}', to: '\u{1d78f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d790}', to: '\u{1d790}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d791}', to: '\u{1d791}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d792}', to: '\u{1d792}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d793}', to: '\u{1d793}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d794}', to: '\u{1d794}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d795}', to: '\u{1d795}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d796}', to: '\u{1d796}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d797}', to: '\u{1d797}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d798}', to: '\u{1d798}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, + Range { from: '\u{1d799}', to: '\u{1d799}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d79a}', to: '\u{1d79a}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d79b}', to: '\u{1d79b}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, + Range { from: '\u{1d79c}', to: '\u{1d79c}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d79d}', to: '\u{1d79d}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d79e}', to: '\u{1d79e}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d79f}', to: '\u{1d79f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a0}', to: '\u{1d7a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a1}', to: '\u{1d7a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a2}', to: '\u{1d7a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a3}', to: '\u{1d7a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a4}', to: '\u{1d7a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a5}', to: '\u{1d7a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a6}', to: '\u{1d7a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a7}', to: '\u{1d7a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a8}', to: '\u{1d7a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7a9}', to: '\u{1d7a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, + Range { from: '\u{1d7aa}', to: '\u{1d7aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7ab}', to: '\u{1d7ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, + Range { from: '\u{1d7ac}', to: '\u{1d7ac}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7ad}', to: '\u{1d7ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7ae}', to: '\u{1d7ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7af}', to: '\u{1d7af}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7b0}', to: '\u{1d7b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7b1}', to: '\u{1d7b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7b2}', to: '\u{1d7b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) },
+    Range { from: '\u{1d7b3}', to: '\u{1d7b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7b4}', to: '\u{1d7b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7b5}', to: '\u{1d7b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) },
+    Range { from: '\u{1d7b6}', to: '\u{1d7b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7b7}', to: '\u{1d7b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7b8}', to: '\u{1d7b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7b9}', to: '\u{1d7b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7ba}', to: '\u{1d7ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7bb}', to: '\u{1d7bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7bd}', to: '\u{1d7bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7be}', to: '\u{1d7be}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7bf}', to: '\u{1d7bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c0}', to: '\u{1d7c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c1}', to: '\u{1d7c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c2}', to: '\u{1d7c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c3}', to: '\u{1d7c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) },
+    Range { from: '\u{1d7c4}', to: '\u{1d7c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c5}', to: '\u{1d7c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c6}', to: '\u{1d7c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c7}', to: '\u{1d7c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c8}', to: '\u{1d7c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7c9}', to: '\u{1d7c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7ca}', to: '\u{1d7cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 2, byte_len: 2 }) },
+    Range { from: '\u{1d7cc}', to: '\u{1d7cd}', mapping: Disallowed },
+    Range { from: '\u{1d7ce}', to: '\u{1d7ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7cf}', to: '\u{1d7cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7d0}', to: '\u{1d7d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7d1}', to: '\u{1d7d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7d2}', to: '\u{1d7d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7d3}', to: '\u{1d7d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7d4}', to: '\u{1d7d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7d5}', to: '\u{1d7d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7d6}', to: '\u{1d7d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7d7}', to: '\u{1d7d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7d8}', to: '\u{1d7d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7d9}', to: '\u{1d7d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7da}', to: '\u{1d7da}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7db}', to: '\u{1d7db}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7dc}', to: '\u{1d7dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7dd}', to: '\u{1d7dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7de}', to: '\u{1d7de}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7df}', to: '\u{1d7df}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7e0}', to: '\u{1d7e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7e1}', to: '\u{1d7e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7e2}', to: '\u{1d7e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7e3}', to: '\u{1d7e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7e4}', to: '\u{1d7e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7e5}', to: '\u{1d7e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7e6}', to: '\u{1d7e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7e7}', to: '\u{1d7e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7e8}', to: '\u{1d7e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7e9}', to: '\u{1d7e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7ea}', to: '\u{1d7ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7eb}', to: '\u{1d7eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7ec}', to: '\u{1d7ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7ed}', to: '\u{1d7ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7ee}', to: '\u{1d7ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7ef}', to: '\u{1d7ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7f0}', to: '\u{1d7f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7f1}', to: '\u{1d7f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7f2}', to: '\u{1d7f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7f3}', to: '\u{1d7f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7f4}', to: '\u{1d7f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7f5}', to: '\u{1d7f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7f6}', to: '\u{1d7f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7f7}', to: '\u{1d7f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7f8}', to: '\u{1d7f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7f9}', to: '\u{1d7f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1d7fa}', to: '\u{1d7fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7fb}', to: '\u{1d7fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7fc}', to: '\u{1d7fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7fd}', to: '\u{1d7fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7fe}', to: '\u{1d7fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d7ff}', to: '\u{1d7ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) },
+    Range { from: '\u{1d800}', to: '\u{1da8b}', mapping: Valid },
+    Range { from: '\u{1da8c}', to: '\u{1da9a}', mapping: Disallowed },
+    Range { from: '\u{1da9b}', to: '\u{1da9f}', mapping: Valid },
+    Range { from: '\u{1daa0}', to: '\u{1daa0}', mapping: Disallowed },
+    Range { from: '\u{1daa1}', to: '\u{1daaf}', mapping: Valid },
+    Range { from: '\u{1dab0}', to: '\u{1dfff}', mapping: Disallowed },
+    Range { from: '\u{1e000}', to: '\u{1e006}', mapping: Valid },
+    Range { from: '\u{1e007}', to: '\u{1e007}', mapping: Disallowed },
+    Range { from: '\u{1e008}', to: '\u{1e018}', mapping: Valid },
+    Range { from: '\u{1e019}', to: '\u{1e01a}', mapping: Disallowed },
+    Range { from: '\u{1e01b}', to: '\u{1e021}', mapping: Valid },
+    Range { from: '\u{1e022}', to: '\u{1e022}', mapping: Disallowed },
+    Range { from: '\u{1e023}', to: '\u{1e024}', mapping: Valid },
+    Range { from: '\u{1e025}', to: '\u{1e025}', mapping: Disallowed },
+    Range { from: '\u{1e026}', to: '\u{1e02a}', mapping: Valid },
+    Range { from: '\u{1e02b}', to: '\u{1e7ff}', mapping: Disallowed },
+    Range { from: '\u{1e800}', to: '\u{1e8c4}', mapping: Valid },
+    Range { from: '\u{1e8c5}', to: '\u{1e8c6}', mapping: Disallowed },
+    Range { from: '\u{1e8c7}', to: '\u{1e8d6}', mapping: Valid },
+    Range { from: '\u{1e8d7}', to: '\u{1e8ff}', mapping: Disallowed },
+    Range { from: '\u{1e900}', to: '\u{1e900}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e901}', to: '\u{1e901}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e902}', to: '\u{1e902}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e903}', to: '\u{1e903}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e904}', to: '\u{1e904}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e905}', to: '\u{1e905}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e906}', to: '\u{1e906}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e907}', to: '\u{1e907}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e908}', to: '\u{1e908}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e909}', to: '\u{1e909}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e90a}', to: '\u{1e90a}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e90b}', to: '\u{1e90b}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e90c}', to: '\u{1e90c}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e90d}', to: '\u{1e90d}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e90e}', to: '\u{1e90e}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e90f}', to: '\u{1e90f}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e910}', to: '\u{1e910}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e911}', to: '\u{1e911}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e912}', to: '\u{1e912}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e913}', to: '\u{1e913}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e914}', to: '\u{1e914}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e915}', to: '\u{1e915}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e916}', to: '\u{1e916}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 41, byte_len: 4 }) },
+    Range { from: '\u{1e917}', to: '\u{1e917}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e918}', to: '\u{1e918}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e919}', to: '\u{1e919}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e91a}', to: '\u{1e91a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e91b}', to: '\u{1e91b}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e91c}', to: '\u{1e91c}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e91d}', to: '\u{1e91d}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e91e}', to: '\u{1e91e}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e91f}', to: '\u{1e91f}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e920}', to: '\u{1e920}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e921}', to: '\u{1e921}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 42, byte_len: 4 }) },
+    Range { from: '\u{1e922}', to: '\u{1e94a}', mapping: Valid },
+    Range { from: '\u{1e94b}', to: '\u{1e94f}', mapping: Disallowed },
+    Range { from: '\u{1e950}', to: '\u{1e959}', mapping: Valid },
+    Range { from: '\u{1e95a}', to: '\u{1e95d}', mapping: Disallowed },
+    Range { from: '\u{1e95e}', to: '\u{1e95f}', mapping: Valid },
+    Range { from: '\u{1e960}', to: '\u{1edff}', mapping: Disallowed },
+    Range { from: '\u{1ee00}', to: '\u{1ee00}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee01}', to: '\u{1ee01}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee02}', to: '\u{1ee02}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee03}', to: '\u{1ee03}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee04}', to: '\u{1ee04}', mapping: Disallowed },
+    Range { from: '\u{1ee05}', to: '\u{1ee05}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee06}', to: '\u{1ee06}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee07}', to: '\u{1ee07}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee08}', to: '\u{1ee08}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee09}', to: '\u{1ee09}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee0a}', to: '\u{1ee0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee0b}', to: '\u{1ee0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee0c}', to: '\u{1ee0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee0d}', to: '\u{1ee0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee0e}', to: '\u{1ee0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee0f}', to: '\u{1ee0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee10}', to: '\u{1ee10}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee11}', to: '\u{1ee11}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee12}', to: '\u{1ee12}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee13}', to: '\u{1ee13}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee14}', to: '\u{1ee14}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee15}', to: '\u{1ee15}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee16}', to: '\u{1ee16}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee17}', to: '\u{1ee17}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee18}', to: '\u{1ee18}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee19}', to: '\u{1ee19}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee1a}', to: '\u{1ee1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee1b}', to: '\u{1ee1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee1c}', to: '\u{1ee1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1ee1d}', to: '\u{1ee1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }) },
+    Range { from: '\u{1ee1e}', to: '\u{1ee1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1ee1f}', to: '\u{1ee1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1ee20}', to: '\u{1ee20}', mapping: Disallowed },
+    Range { from: '\u{1ee21}', to: '\u{1ee21}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee22}', to: '\u{1ee22}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee23}', to: '\u{1ee23}', mapping: Disallowed },
+    Range { from: '\u{1ee24}', to: '\u{1ee24}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee25}', to: '\u{1ee26}', mapping: Disallowed },
+    Range { from: '\u{1ee27}', to: '\u{1ee27}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee28}', to: '\u{1ee28}', mapping: Disallowed },
+    Range { from: '\u{1ee29}', to: '\u{1ee29}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee2a}', to: '\u{1ee2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee2b}', to: '\u{1ee2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee2c}', to: '\u{1ee2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee2d}', to: '\u{1ee2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee2e}', to: '\u{1ee2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee2f}', to: '\u{1ee2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee30}', to: '\u{1ee30}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee31}', to: '\u{1ee31}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee32}', to: '\u{1ee32}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee33}', to: '\u{1ee33}', mapping: Disallowed },
+    Range { from: '\u{1ee34}', to: '\u{1ee34}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee35}', to: '\u{1ee35}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee36}', to: '\u{1ee36}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee37}', to: '\u{1ee37}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee38}', to: '\u{1ee38}', mapping: Disallowed },
+    Range { from: '\u{1ee39}', to: '\u{1ee39}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee3a}', to: '\u{1ee3a}', mapping: Disallowed },
+    Range { from: '\u{1ee3b}', to: '\u{1ee3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee3c}', to: '\u{1ee41}', mapping: Disallowed },
+    Range { from: '\u{1ee42}', to: '\u{1ee42}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee43}', to: '\u{1ee46}', mapping: Disallowed },
+    Range { from: '\u{1ee47}', to: '\u{1ee47}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee48}', to: '\u{1ee48}', mapping: Disallowed },
+    Range { from: '\u{1ee49}', to: '\u{1ee49}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee4a}', to: '\u{1ee4a}', mapping: Disallowed },
+    Range { from: '\u{1ee4b}', to: '\u{1ee4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee4c}', to: '\u{1ee4c}', mapping: Disallowed },
+    Range { from: '\u{1ee4d}', to: '\u{1ee4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee4e}', to: '\u{1ee4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee4f}', to: '\u{1ee4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee50}', to: '\u{1ee50}', mapping: Disallowed },
+    Range { from: '\u{1ee51}', to: '\u{1ee51}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee52}', to: '\u{1ee52}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee53}', to: '\u{1ee53}', mapping: Disallowed },
+    Range { from: '\u{1ee54}', to: '\u{1ee54}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee55}', to: '\u{1ee56}', mapping: Disallowed },
+    Range { from: '\u{1ee57}', to: '\u{1ee57}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee58}', to: '\u{1ee58}', mapping: Disallowed },
+    Range { from: '\u{1ee59}', to: '\u{1ee59}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee5a}', to: '\u{1ee5a}', mapping: Disallowed },
+    Range { from: '\u{1ee5b}', to: '\u{1ee5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee5c}', to: '\u{1ee5c}', mapping: Disallowed },
+    Range { from: '\u{1ee5d}', to: '\u{1ee5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }) },
+    Range { from: '\u{1ee5e}', to: '\u{1ee5e}', mapping: Disallowed },
+    Range { from: '\u{1ee5f}', to: '\u{1ee5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1ee60}', to: '\u{1ee60}', mapping: Disallowed },
+    Range { from: '\u{1ee61}', to: '\u{1ee61}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee62}', to: '\u{1ee62}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee63}', to: '\u{1ee63}', mapping: Disallowed },
+    Range { from: '\u{1ee64}', to: '\u{1ee64}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee65}', to: '\u{1ee66}', mapping: Disallowed },
+    Range { from: '\u{1ee67}', to: '\u{1ee67}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee68}', to: '\u{1ee68}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee69}', to: '\u{1ee69}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee6a}', to: '\u{1ee6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee6b}', to: '\u{1ee6b}', mapping: Disallowed },
+    Range { from: '\u{1ee6c}', to: '\u{1ee6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee6d}', to: '\u{1ee6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee6e}', to: '\u{1ee6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee6f}', to: '\u{1ee6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee70}', to: '\u{1ee70}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee71}', to: '\u{1ee71}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee72}', to: '\u{1ee72}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee73}', to: '\u{1ee73}', mapping: Disallowed },
+    Range { from: '\u{1ee74}', to: '\u{1ee74}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee75}', to: '\u{1ee75}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee76}', to: '\u{1ee76}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee77}', to: '\u{1ee77}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee78}', to: '\u{1ee78}', mapping: Disallowed },
+    Range { from: '\u{1ee79}', to: '\u{1ee79}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee7a}', to: '\u{1ee7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee7b}', to: '\u{1ee7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee7c}', to: '\u{1ee7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1ee7d}', to: '\u{1ee7d}', mapping: Disallowed },
+    Range { from: '\u{1ee7e}', to: '\u{1ee7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1ee7f}', to: '\u{1ee7f}', mapping: Disallowed },
+    Range { from: '\u{1ee80}', to: '\u{1ee80}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee81}', to: '\u{1ee81}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee82}', to: '\u{1ee82}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee83}', to: '\u{1ee83}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee84}', to: '\u{1ee84}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee85}', to: '\u{1ee85}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee86}', to: '\u{1ee86}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee87}', to: '\u{1ee87}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee88}', to: '\u{1ee88}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee89}', to: '\u{1ee89}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee8a}', to: '\u{1ee8a}', mapping: Disallowed },
+    Range { from: '\u{1ee8b}', to: '\u{1ee8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee8c}', to: '\u{1ee8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee8d}', to: '\u{1ee8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee8e}', to: '\u{1ee8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee8f}', to: '\u{1ee8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee90}', to: '\u{1ee90}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee91}', to: '\u{1ee91}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee92}', to: '\u{1ee92}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee93}', to: '\u{1ee93}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee94}', to: '\u{1ee94}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee95}', to: '\u{1ee95}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee96}', to: '\u{1ee96}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1ee97}', to: '\u{1ee97}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee98}', to: '\u{1ee98}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee99}', to: '\u{1ee99}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee9a}', to: '\u{1ee9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee9b}', to: '\u{1ee9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1ee9c}', to: '\u{1eea0}', mapping: Disallowed },
+    Range { from: '\u{1eea1}', to: '\u{1eea1}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1eea2}', to: '\u{1eea2}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1eea3}', to: '\u{1eea3}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eea4}', to: '\u{1eea4}', mapping: Disallowed },
+    Range { from: '\u{1eea5}', to: '\u{1eea5}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eea6}', to: '\u{1eea6}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eea7}', to: '\u{1eea7}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1eea8}', to: '\u{1eea8}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eea9}', to: '\u{1eea9}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeaa}', to: '\u{1eeaa}', mapping: Disallowed },
+    Range { from: '\u{1eeab}', to: '\u{1eeab}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeac}', to: '\u{1eeac}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eead}', to: '\u{1eead}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeae}', to: '\u{1eeae}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeaf}', to: '\u{1eeaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeb0}', to: '\u{1eeb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeb1}', to: '\u{1eeb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeb2}', to: '\u{1eeb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeb3}', to: '\u{1eeb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeb4}', to: '\u{1eeb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeb5}', to: '\u{1eeb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1eeb6}', to: '\u{1eeb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) },
+    Range { from: '\u{1eeb7}', to: '\u{1eeb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeb8}', to: '\u{1eeb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeb9}', to: '\u{1eeb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eeba}', to: '\u{1eeba}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eebb}', to: '\u{1eebb}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) },
+    Range { from: '\u{1eebc}', to: '\u{1eeef}', mapping: Disallowed },
+    Range { from: '\u{1eef0}', to: '\u{1eef1}', mapping: Valid },
+    Range { from: '\u{1eef2}', to: '\u{1efff}', mapping: Disallowed },
+    Range { from: '\u{1f000}', to: '\u{1f02b}', mapping: Valid },
+    Range { from: '\u{1f02c}', to: '\u{1f02f}', mapping: Disallowed },
+    Range { from: '\u{1f030}', to: '\u{1f093}', mapping: Valid },
+    Range { from: '\u{1f094}', to: '\u{1f09f}', mapping: Disallowed },
+    Range { from: '\u{1f0a0}', to: '\u{1f0ae}', mapping: Valid },
+    Range { from: '\u{1f0af}', to: '\u{1f0b0}', mapping: Disallowed },
+    Range { from: '\u{1f0b1}', to: '\u{1f0bf}', mapping: Valid },
+    Range { from: '\u{1f0c0}', to: '\u{1f0c0}', mapping: Disallowed },
+    Range { from: '\u{1f0c1}', to: '\u{1f0cf}', mapping: Valid },
+    Range { from: '\u{1f0d0}', to: '\u{1f0d0}', mapping: Disallowed },
+    Range { from: '\u{1f0d1}', to: '\u{1f0f5}', mapping: Valid },
+    Range { from: '\u{1f0f6}', to: '\u{1f100}', mapping: Disallowed },
+    Range { from: '\u{1f101}', to: '\u{1f101}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f102}', to: '\u{1f102}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f103}', to: '\u{1f103}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f104}', to: '\u{1f104}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f105}', to: '\u{1f105}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f106}', to: '\u{1f106}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f107}', to: '\u{1f107}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f108}', to: '\u{1f108}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f109}', to: '\u{1f109}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f10a}', to: '\u{1f10a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f10b}', to: '\u{1f10c}', mapping: Valid },
+    Range { from: '\u{1f10d}', to: '\u{1f10f}', mapping: Disallowed },
+    Range { from: '\u{1f110}', to: '\u{1f110}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f111}', to: '\u{1f111}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f112}', to: '\u{1f112}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f113}', to: '\u{1f113}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f114}', to: '\u{1f114}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f115}', to: '\u{1f115}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f116}', to: '\u{1f116}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f117}', to: '\u{1f117}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f118}', to: '\u{1f118}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f119}', to: '\u{1f119}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f11a}', to: '\u{1f11a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f11b}', to: '\u{1f11b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f11c}', to: '\u{1f11c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f11d}', to: '\u{1f11d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f11e}', to: '\u{1f11e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f11f}', to: '\u{1f11f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f120}', to: '\u{1f120}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f121}', to: '\u{1f121}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 9, byte_len: 3 }) },
+    Range { from: '\u{1f122}', to: '\u{1f122}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 10, byte_len: 3 }) },
+    Range { from: '\u{1f123}', to: '\u{1f123}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 10, byte_len: 3 }) },
+    Range { from: '\u{1f124}', to: '\u{1f124}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 10, byte_len: 3 }) },
+    Range { from: '\u{1f125}', to: '\u{1f125}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 10, byte_len: 3 }) },
+    Range { from: '\u{1f126}', to: '\u{1f126}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 10, byte_len: 3 }) },
+    Range { from: '\u{1f127}', to: '\u{1f127}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 10, byte_len: 3 }) },
+    Range { from: '\u{1f128}', to: '\u{1f128}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 10, byte_len: 3 }) },
+    Range { from: '\u{1f129}', to: '\u{1f129}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 10, byte_len: 3 }) },
+    Range { from: '\u{1f12a}', to: '\u{1f12a}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 42, byte_len: 7 }) },
+    Range { from: '\u{1f12b}', to: '\u{1f12b}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f12c}', to: '\u{1f12c}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f12d}', to: '\u{1f12d}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 23, byte_len: 2 }) },
+    Range { from: '\u{1f12e}', to: '\u{1f12e}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f12f}', to: '\u{1f12f}', mapping: Disallowed },
+    Range { from: '\u{1f130}', to: '\u{1f130}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f131}', to: '\u{1f131}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f132}', to: '\u{1f132}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f133}', to: '\u{1f133}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f134}', to: '\u{1f134}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f135}', to: '\u{1f135}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f136}', to: '\u{1f136}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f137}', to: '\u{1f137}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f138}', to: '\u{1f138}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f139}', to: '\u{1f139}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f13a}', to: '\u{1f13a}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f13b}', to: '\u{1f13b}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f13c}', to: '\u{1f13c}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f13d}', to: '\u{1f13d}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f13e}', to: '\u{1f13e}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f13f}', to: '\u{1f13f}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f140}', to: '\u{1f140}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f141}', to: '\u{1f141}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f142}', to: '\u{1f142}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f143}', to: '\u{1f143}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f144}', to: '\u{1f144}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f145}', to: '\u{1f145}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f146}', to: '\u{1f146}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f147}', to: '\u{1f147}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f148}', to: '\u{1f148}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f149}', to: '\u{1f149}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) },
+    Range { from: '\u{1f14a}', to: '\u{1f14a}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f14b}', to: '\u{1f14b}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }) },
+    Range { from: '\u{1f14c}', to: '\u{1f14c}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f14d}', to: '\u{1f14d}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }) },
+    Range { from: '\u{1f14e}', to: '\u{1f14e}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f14f}', to: '\u{1f14f}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f150}', to: '\u{1f169}', mapping: Valid },
+    Range { from: '\u{1f16a}', to: '\u{1f16a}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f16b}', to: '\u{1f16b}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f16c}', to: '\u{1f16f}', mapping: Disallowed },
+    Range { from: '\u{1f170}', to: '\u{1f18f}', mapping: Valid },
+    Range { from: '\u{1f190}', to: '\u{1f190}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 42, byte_len: 2 }) },
+    Range { from: '\u{1f191}', to: '\u{1f1ac}', mapping: Valid },
+    Range { from: '\u{1f1ad}', to: '\u{1f1e5}', mapping: Disallowed },
+    Range { from: '\u{1f1e6}', to: '\u{1f1ff}', mapping: Valid },
+    Range { from: '\u{1f200}', to: '\u{1f200}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 42, byte_len: 6 }) },
+    Range { from: '\u{1f201}', to: '\u{1f201}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 42, byte_len: 6 }) },
+    Range { from: '\u{1f202}', to: '\u{1f202}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }) },
+    Range { from: '\u{1f203}', to: '\u{1f20f}', mapping: Disallowed },
+    Range { from: '\u{1f210}', to: '\u{1f210}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 12, byte_len: 3 }) },
+    Range { from: '\u{1f211}', to: '\u{1f211}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f212}', to: '\u{1f212}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f213}', to: '\u{1f213}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f214}', to: '\u{1f214}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }) },
+    Range { from: '\u{1f215}', to: '\u{1f215}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f216}', to: '\u{1f216}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f217}', to: '\u{1f217}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 15, byte_len: 3 }) },
+    Range { from: '\u{1f218}', to: '\u{1f218}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f219}', to: '\u{1f219}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f21a}', to: '\u{1f21a}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f21b}', to: '\u{1f21b}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 28, byte_len: 3 }) },
+    Range { from: '\u{1f21c}', to: '\u{1f21c}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f21d}', to: '\u{1f21d}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f21e}', to: '\u{1f21e}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f21f}', to: '\u{1f21f}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f220}', to: '\u{1f220}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f221}', to: '\u{1f221}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f222}', to: '\u{1f222}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 12, byte_len: 3 }) },
+    Range { from: '\u{1f223}', to: '\u{1f223}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f224}', to: '\u{1f224}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f225}', to: '\u{1f225}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f226}', to: '\u{1f226}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f227}', to: '\u{1f227}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f228}', to: '\u{1f228}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f229}', to: '\u{1f229}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }) },
+    Range { from: '\u{1f22a}', to: '\u{1f22a}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, byte_len: 3 }) },
+    Range { from: '\u{1f22b}', to: '\u{1f22b}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f22c}', to: '\u{1f22c}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 17, byte_len: 3 }) },
+    Range { from: '\u{1f22d}', to: '\u{1f22d}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }) },
+    Range { from: '\u{1f22e}', to: '\u{1f22e}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 17, byte_len: 3 }) },
+    Range { from: '\u{1f22f}', to: '\u{1f22f}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f230}', to: '\u{1f230}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 13, byte_len: 3 }) },
+    Range { from: '\u{1f231}', to: '\u{1f231}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f232}', to: '\u{1f232}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f233}', to: '\u{1f233}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f234}', to: '\u{1f234}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f235}', to: '\u{1f235}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f236}', to: '\u{1f236}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 17, byte_len: 3 }) },
+    Range { from: '\u{1f237}', to: '\u{1f237}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }) },
+    Range { from: '\u{1f238}', to: '\u{1f238}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f239}', to: '\u{1f239}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f23a}', to: '\u{1f23a}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f23b}', to: '\u{1f23b}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{1f23c}', to: '\u{1f23f}', mapping: Disallowed },
+    Range { from: '\u{1f240}', to: '\u{1f240}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 42, byte_len: 9 }) },
+    Range { from: '\u{1f241}', to: '\u{1f241}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 42, byte_len: 9 }) },
+    Range { from: '\u{1f242}', to: '\u{1f242}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 42, byte_len: 9 }) },
+    Range { from: '\u{1f243}', to: '\u{1f243}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 42, byte_len: 9 }) },
+    Range { from: '\u{1f244}', to: '\u{1f244}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 42, byte_len: 9 }) },
+    Range { from: '\u{1f245}', to: '\u{1f245}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 42, byte_len: 9 }) },
+    Range { from: '\u{1f246}', to: '\u{1f246}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 42, byte_len: 9 }) },
+    Range { from: '\u{1f247}', to: '\u{1f247}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 43, byte_len: 9 }) },
+    Range { from: '\u{1f248}', to: '\u{1f248}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 43, byte_len: 9 }) },
+    Range { from: '\u{1f249}', to: '\u{1f24f}', mapping: Disallowed },
+    Range { from: '\u{1f250}', to: '\u{1f250}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{1f251}', to: '\u{1f251}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{1f252}', to: '\u{1f2ff}', mapping: Disallowed },
+    Range { from: '\u{1f300}', to: '\u{1f6d2}', mapping: Valid },
+    Range { from: '\u{1f6d3}', to: '\u{1f6df}', mapping: Disallowed },
+    Range { from: '\u{1f6e0}', to: '\u{1f6ec}', mapping: Valid },
+    Range { from: '\u{1f6ed}', to: '\u{1f6ef}', mapping: Disallowed },
+    Range { from: '\u{1f6f0}', to: '\u{1f6f6}', mapping: Valid },
+    Range { from: '\u{1f6f7}', to: '\u{1f6ff}', mapping: Disallowed },
+    Range { from: '\u{1f700}', to: '\u{1f773}', mapping: Valid },
+    Range { from: '\u{1f774}', to: '\u{1f77f}', mapping: Disallowed },
+    Range { from: '\u{1f780}', to: '\u{1f7d4}', mapping: Valid },
+    Range { from: '\u{1f7d5}', to: '\u{1f7ff}', mapping: Disallowed },
+    Range { from: '\u{1f800}', to: '\u{1f80b}', mapping: Valid },
+    Range { from: '\u{1f80c}', to: '\u{1f80f}', mapping: Disallowed },
+    Range { from: '\u{1f810}', to: '\u{1f847}', mapping: Valid },
+    Range { from: '\u{1f848}', to: '\u{1f84f}', mapping: Disallowed },
+    Range { from: '\u{1f850}', to: '\u{1f859}', mapping: Valid },
+    Range { from: '\u{1f85a}', to: '\u{1f85f}', mapping: Disallowed },
+    Range { from: '\u{1f860}', to: '\u{1f887}', mapping: Valid },
+    Range { from: '\u{1f888}', to: '\u{1f88f}', mapping: Disallowed },
+    Range { from: '\u{1f890}', to: '\u{1f8ad}', mapping: Valid },
+    Range { from: '\u{1f8ae}', to: '\u{1f90f}', mapping: Disallowed },
+    Range { from: '\u{1f910}', to: '\u{1f91e}', mapping: Valid },
+    Range { from: '\u{1f91f}', to: '\u{1f91f}', mapping: Disallowed },
+    Range { from: '\u{1f920}', to: '\u{1f927}', mapping: Valid },
+    Range { from: '\u{1f928}', to: '\u{1f92f}', mapping: Disallowed },
+    Range { from: '\u{1f930}', to: '\u{1f930}', mapping: Valid },
+    Range { from: '\u{1f931}', to: '\u{1f932}', mapping: Disallowed },
+    Range { from: '\u{1f933}', to: '\u{1f93e}', mapping: Valid },
+    Range { from: '\u{1f93f}', to: '\u{1f93f}', mapping: Disallowed },
+    Range { from: '\u{1f940}', to: '\u{1f94b}', mapping: Valid },
+    Range { from: '\u{1f94c}', to: '\u{1f94f}', mapping: Disallowed },
+    Range { from: '\u{1f950}', to: '\u{1f95e}', mapping: Valid },
+    Range { from: '\u{1f95f}', to: '\u{1f97f}', mapping: Disallowed },
+    Range { from: '\u{1f980}', to: '\u{1f991}', mapping: Valid },
+    Range { from: '\u{1f992}', to: '\u{1f9bf}', mapping: Disallowed },
+    Range { from: '\u{1f9c0}', to: '\u{1f9c0}', mapping: Valid },
+    Range { from: '\u{1f9c1}', to: '\u{1ffff}', mapping: Disallowed },
+    Range { from: '\u{20000}', to: '\u{2a6d6}', mapping: Valid },
+    Range { from: '\u{2a6d7}', to: '\u{2a6ff}', mapping: Disallowed },
+    Range { from: '\u{2a700}', to: '\u{2b734}', mapping: Valid },
+    Range { from: '\u{2b735}', to: '\u{2b73f}', mapping: Disallowed },
+    Range { from: '\u{2b740}', to: '\u{2b81d}', mapping: Valid },
+    Range { from: '\u{2b81e}', to: '\u{2b81f}', mapping: Disallowed },
+    Range { from: '\u{2b820}', to: '\u{2cea1}', mapping: Valid },
+    Range { from: '\u{2cea2}', to: '\u{2f7ff}', mapping: Disallowed },
+    Range { from: '\u{2f800}', to: '\u{2f800}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f801}', to: '\u{2f801}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f802}', to: '\u{2f802}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f803}', to: '\u{2f803}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 43, byte_len: 4 }) },
+    Range { from: '\u{2f804}', to: '\u{2f804}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f805}', to: '\u{2f805}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 29, byte_len: 3 }) },
+    Range { from: '\u{2f806}', to: '\u{2f806}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f807}', to: '\u{2f807}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f808}', to: '\u{2f808}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f809}', to: '\u{2f809}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f80a}', to: '\u{2f80a}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 29, byte_len: 3 }) },
+    Range { from: '\u{2f80b}', to: '\u{2f80b}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f80c}', to: '\u{2f80c}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f80d}', to: '\u{2f80d}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 43, byte_len: 4 }) },
+    Range { from: '\u{2f80e}', to: '\u{2f80e}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 29, byte_len: 3 }) },
+    Range { from: '\u{2f80f}', to: '\u{2f80f}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f810}', to: '\u{2f810}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f811}', to: '\u{2f811}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f812}', to: '\u{2f812}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 43, byte_len: 4 }) },
+    Range { from: '\u{2f813}', to: '\u{2f813}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f814}', to: '\u{2f814}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f815}', to: '\u{2f815}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{2f816}', to: '\u{2f816}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 43, byte_len: 4 }) },
+    Range { from: '\u{2f817}', to: '\u{2f817}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f818}', to: '\u{2f818}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f819}', to: '\u{2f819}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f81a}', to: '\u{2f81a}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f81b}', to: '\u{2f81b}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 30, byte_len: 3 }) },
+    Range { from: '\u{2f81c}', to: '\u{2f81c}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 43, byte_len: 4 }) },
+    Range { from: '\u{2f81d}', to: '\u{2f81d}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 11, byte_len: 3 }) },
+    Range { from: '\u{2f81e}', to: '\u{2f81e}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f81f}', to: '\u{2f81f}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f820}', to: '\u{2f820}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f821}', to: '\u{2f821}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f822}', to: '\u{2f822}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 42, byte_len: 3 }) },
+    Range { from: '\u{2f823}', to: '\u{2f823}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f824}', to: '\u{2f824}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f825}', to: '\u{2f825}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 30, byte_len: 3 }) },
+    Range { from: '\u{2f826}', to: '\u{2f826}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 29, byte_len: 3 }) },
+    Range { from: '\u{2f827}', to: '\u{2f827}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 29, byte_len: 3 }) },
+    Range { from: '\u{2f828}', to: '\u{2f828}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 30, byte_len: 3 }) },
+    Range { from: '\u{2f829}', to: '\u{2f829}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f82a}', to: '\u{2f82a}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f82b}', to: '\u{2f82b}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 27, byte_len: 3 }) },
+    Range { from: '\u{2f82c}', to: '\u{2f82c}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f82d}', to: '\u{2f82d}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 29, byte_len: 3 }) },
+    Range { from: '\u{2f82e}', to: '\u{2f82e}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f82f}', to: '\u{2f82f}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f830}', to: '\u{2f830}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f831}', to: '\u{2f833}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f834}', to: '\u{2f834}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 43, byte_len: 4 }) },
+    Range { from: '\u{2f835}', to: '\u{2f835}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f836}', to: '\u{2f836}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f837}', to: '\u{2f837}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f838}', to: '\u{2f838}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 43, byte_len: 4 }) },
+    Range { from: '\u{2f839}', to: '\u{2f839}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f83a}', to: '\u{2f83a}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f83b}', to: '\u{2f83b}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f83c}', to: '\u{2f83c}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f83d}', to: '\u{2f83d}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f83e}', to: '\u{2f83e}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f83f}', to: '\u{2f83f}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f840}', to: '\u{2f840}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f841}', to: '\u{2f841}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f842}', to: '\u{2f842}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f843}', to: '\u{2f843}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f844}', to: '\u{2f844}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f845}', to: '\u{2f846}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f847}', to: '\u{2f847}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 30, byte_len: 3 }) },
+    Range { from: '\u{2f848}', to: '\u{2f848}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f849}', to: '\u{2f849}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f84a}', to: '\u{2f84a}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f84b}', to: '\u{2f84b}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f84c}', to: '\u{2f84c}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 29, byte_len: 3 }) },
+    Range { from: '\u{2f84d}', to: '\u{2f84d}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f84e}', to: '\u{2f84e}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f84f}', to: '\u{2f84f}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f850}', to: '\u{2f850}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 28, byte_len: 3 }) },
+    Range { from: '\u{2f851}', to: '\u{2f851}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f852}', to: '\u{2f852}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f853}', to: '\u{2f853}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f854}', to: '\u{2f854}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f855}', to: '\u{2f855}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f856}', to: '\u{2f856}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f857}', to: '\u{2f857}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f858}', to: '\u{2f858}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 43, byte_len: 3 }) },
+    Range { from: '\u{2f859}', to: '\u{2f859}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 43, byte_len: 4 }) },
+    Range { from:
'\u{2f85a}', to: '\u{2f85a}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 43, byte_len: 3 }) }, + Range { from: '\u{2f85b}', to: '\u{2f85b}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f85c}', to: '\u{2f85c}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f85d}', to: '\u{2f85d}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 42, byte_len: 3 }) }, + Range { from: '\u{2f85e}', to: '\u{2f85e}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f85f}', to: '\u{2f85f}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f860}', to: '\u{2f860}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f861}', to: '\u{2f861}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f862}', to: '\u{2f862}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f863}', to: '\u{2f863}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f864}', to: '\u{2f864}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f865}', to: '\u{2f865}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f866}', to: '\u{2f866}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f867}', to: '\u{2f867}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f868}', to: '\u{2f868}', mapping: Disallowed }, + Range { from: '\u{2f869}', to: '\u{2f869}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f86a}', to: '\u{2f86b}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f86c}', to: '\u{2f86c}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f86d}', to: '\u{2f86d}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f86e}', to: '\u{2f86e}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f86f}', to: '\u{2f86f}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{2f870}', to: '\u{2f870}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f871}', to: '\u{2f871}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f872}', to: '\u{2f872}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f873}', to: '\u{2f873}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f874}', to: '\u{2f874}', mapping: Disallowed }, + Range { from: '\u{2f875}', to: '\u{2f875}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 12, byte_len: 3 }) }, + Range { 
from: '\u{2f876}', to: '\u{2f876}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f877}', to: '\u{2f877}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f878}', to: '\u{2f878}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f879}', to: '\u{2f879}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f87a}', to: '\u{2f87a}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f87b}', to: '\u{2f87b}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f87c}', to: '\u{2f87c}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f87d}', to: '\u{2f87d}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f87e}', to: '\u{2f87e}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f87f}', to: '\u{2f87f}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f880}', to: '\u{2f880}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f881}', to: '\u{2f881}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f882}', to: '\u{2f882}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f883}', to: '\u{2f883}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f884}', to: '\u{2f884}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f885}', to: '\u{2f885}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f886}', to: '\u{2f886}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f887}', to: '\u{2f887}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f888}', to: '\u{2f888}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f889}', to: '\u{2f889}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f88a}', to: '\u{2f88a}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f88b}', to: '\u{2f88b}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f88c}', to: '\u{2f88c}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f88d}', to: '\u{2f88d}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f88e}', to: '\u{2f88e}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{2f88f}', to: '\u{2f88f}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 44, 
byte_len: 4 }) }, + Range { from: '\u{2f890}', to: '\u{2f890}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 12, byte_len: 3 }) }, + Range { from: '\u{2f891}', to: '\u{2f892}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f893}', to: '\u{2f893}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f894}', to: '\u{2f895}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f896}', to: '\u{2f896}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f897}', to: '\u{2f897}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f898}', to: '\u{2f898}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f899}', to: '\u{2f899}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f89a}', to: '\u{2f89a}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f89b}', to: '\u{2f89b}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f89c}', to: '\u{2f89c}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f89d}', to: '\u{2f89d}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f89e}', to: '\u{2f89e}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f89f}', to: '\u{2f89f}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8a0}', to: '\u{2f8a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8a1}', to: '\u{2f8a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8a2}', to: '\u{2f8a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8a3}', to: '\u{2f8a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f8a4}', to: '\u{2f8a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f8a5}', to: '\u{2f8a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8a6}', to: '\u{2f8a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8a7}', to: '\u{2f8a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8a8}', to: '\u{2f8a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f8a9}', to: '\u{2f8a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8aa}', to: '\u{2f8aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8ab}', to: '\u{2f8ab}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f8ac}', to: '\u{2f8ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8ad}', to: '\u{2f8ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8ae}', to: '\u{2f8ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8af}', to: '\u{2f8af}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8b0}', to: '\u{2f8b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f8b1}', to: '\u{2f8b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{2f8b2}', to: '\u{2f8b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8b3}', to: '\u{2f8b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8b4}', to: '\u{2f8b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8b5}', to: '\u{2f8b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8b6}', to: '\u{2f8b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8b7}', to: '\u{2f8b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8b8}', to: '\u{2f8b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 44, byte_len: 4 }) }, + Range { from: '\u{2f8b9}', to: '\u{2f8b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8ba}', to: '\u{2f8ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8bb}', to: '\u{2f8bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8bc}', to: '\u{2f8bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f8bd}', to: '\u{2f8bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8be}', to: '\u{2f8be}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8bf}', to: '\u{2f8bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c0}', to: '\u{2f8c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c1}', to: '\u{2f8c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c2}', to: '\u{2f8c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c3}', to: '\u{2f8c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c4}', to: '\u{2f8c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c5}', to: '\u{2f8c5}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c6}', to: '\u{2f8c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c7}', to: '\u{2f8c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8c8}', to: '\u{2f8c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f8c9}', to: '\u{2f8c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8ca}', to: '\u{2f8ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8cb}', to: '\u{2f8cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8cc}', to: '\u{2f8cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8cd}', to: '\u{2f8cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8ce}', to: '\u{2f8ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8cf}', to: '\u{2f8cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f8d0}', to: '\u{2f8d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8d1}', to: '\u{2f8d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8d2}', to: '\u{2f8d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8d3}', to: '\u{2f8d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8d4}', to: '\u{2f8d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8d5}', to: '\u{2f8d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8d6}', to: '\u{2f8d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8d7}', to: '\u{2f8d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8d8}', to: '\u{2f8d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{2f8d9}', to: '\u{2f8d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f8da}', to: '\u{2f8da}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8db}', to: '\u{2f8db}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8dc}', to: '\u{2f8dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8dd}', to: '\u{2f8dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8de}', to: '\u{2f8de}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8df}', to: 
'\u{2f8df}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8e0}', to: '\u{2f8e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8e1}', to: '\u{2f8e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8e2}', to: '\u{2f8e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f8e3}', to: '\u{2f8e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8e4}', to: '\u{2f8e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8e5}', to: '\u{2f8e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8e6}', to: '\u{2f8e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8e7}', to: '\u{2f8e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f8e8}', to: '\u{2f8e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8e9}', to: '\u{2f8e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8ea}', to: '\u{2f8ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8eb}', to: '\u{2f8eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8ec}', to: '\u{2f8ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8ed}', to: '\u{2f8ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8ee}', to: '\u{2f8ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8ef}', to: '\u{2f8ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8f0}', to: '\u{2f8f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8f1}', to: '\u{2f8f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8f2}', to: '\u{2f8f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8f3}', to: '\u{2f8f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8f4}', to: '\u{2f8f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8f5}', to: '\u{2f8f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{2f8f6}', to: '\u{2f8f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8f7}', to: '\u{2f8f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8f8}', to: '\u{2f8f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 45, byte_len: 4 }) }, + 
Range { from: '\u{2f8f9}', to: '\u{2f8f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8fa}', to: '\u{2f8fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8fb}', to: '\u{2f8fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f8fc}', to: '\u{2f8fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8fd}', to: '\u{2f8fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8fe}', to: '\u{2f8fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f8ff}', to: '\u{2f8ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f900}', to: '\u{2f900}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f901}', to: '\u{2f901}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f902}', to: '\u{2f902}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }) }, + Range { from: '\u{2f903}', to: '\u{2f903}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f904}', to: '\u{2f904}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f905}', to: '\u{2f905}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f906}', to: '\u{2f906}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f907}', to: '\u{2f907}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f908}', to: '\u{2f908}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f909}', to: '\u{2f909}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f90a}', to: '\u{2f90a}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f90b}', to: '\u{2f90b}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f90c}', to: '\u{2f90c}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f90d}', to: '\u{2f90d}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f90e}', to: '\u{2f90e}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f90f}', to: '\u{2f90f}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f910}', to: '\u{2f910}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f911}', to: '\u{2f911}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 45, byte_len: 4 }) }, + Range { from: '\u{2f912}', to: '\u{2f912}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, 
byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f913}', to: '\u{2f913}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f914}', to: '\u{2f914}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f915}', to: '\u{2f915}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f916}', to: '\u{2f916}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f917}', to: '\u{2f917}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f918}', to: '\u{2f918}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 45, byte_len: 3 }) }, + Range { from: '\u{2f919}', to: '\u{2f919}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f91a}', to: '\u{2f91a}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f91b}', to: '\u{2f91b}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f91c}', to: '\u{2f91c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f91d}', to: '\u{2f91d}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f91e}', to: '\u{2f91e}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f91f}', to: '\u{2f91f}', mapping: Disallowed }, + Range { from: '\u{2f920}', to: '\u{2f920}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f921}', to: '\u{2f921}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f922}', to: '\u{2f922}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f923}', to: '\u{2f923}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f924}', to: '\u{2f924}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f925}', to: '\u{2f925}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f926}', to: '\u{2f926}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f927}', to: '\u{2f927}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f928}', to: '\u{2f928}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f929}', to: '\u{2f929}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f92a}', to: '\u{2f92a}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f92b}', to: '\u{2f92b}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f92c}', to: '\u{2f92d}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: 
'\u{2f92e}', to: '\u{2f92e}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f92f}', to: '\u{2f92f}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f930}', to: '\u{2f930}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f931}', to: '\u{2f931}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f932}', to: '\u{2f932}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f933}', to: '\u{2f933}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f934}', to: '\u{2f934}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f935}', to: '\u{2f935}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f936}', to: '\u{2f936}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f937}', to: '\u{2f937}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f938}', to: '\u{2f938}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{2f939}', to: '\u{2f939}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f93a}', to: '\u{2f93a}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f93b}', to: '\u{2f93b}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f93c}', to: '\u{2f93c}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f93d}', to: '\u{2f93d}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f93e}', to: '\u{2f93e}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f93f}', to: '\u{2f93f}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f940}', to: '\u{2f940}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f941}', to: '\u{2f941}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f942}', to: '\u{2f942}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f943}', to: '\u{2f943}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f944}', to: '\u{2f944}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f945}', to: '\u{2f945}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f946}', to: '\u{2f947}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f948}', to: '\u{2f948}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 30, byte_len: 3 
}) }, + Range { from: '\u{2f949}', to: '\u{2f949}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f94a}', to: '\u{2f94a}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f94b}', to: '\u{2f94b}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f94c}', to: '\u{2f94c}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f94d}', to: '\u{2f94d}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f94e}', to: '\u{2f94e}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f94f}', to: '\u{2f94f}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{2f950}', to: '\u{2f950}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f951}', to: '\u{2f951}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f952}', to: '\u{2f952}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f953}', to: '\u{2f953}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f954}', to: '\u{2f954}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f955}', to: '\u{2f955}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f956}', to: '\u{2f956}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f957}', to: '\u{2f957}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f958}', to: '\u{2f958}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f959}', to: '\u{2f959}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f95a}', to: '\u{2f95a}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f95b}', to: '\u{2f95b}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f95c}', to: '\u{2f95c}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f95d}', to: '\u{2f95e}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f95f}', to: '\u{2f95f}', mapping: Disallowed }, + Range { from: '\u{2f960}', to: '\u{2f960}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f961}', to: '\u{2f961}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f962}', to: '\u{2f962}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f963}', to: '\u{2f963}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f964}', 
to: '\u{2f964}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f965}', to: '\u{2f965}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f966}', to: '\u{2f966}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f967}', to: '\u{2f967}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f968}', to: '\u{2f968}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f969}', to: '\u{2f969}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f96a}', to: '\u{2f96a}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f96b}', to: '\u{2f96b}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f96c}', to: '\u{2f96c}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f96d}', to: '\u{2f96d}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f96e}', to: '\u{2f96e}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f96f}', to: '\u{2f96f}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f970}', to: '\u{2f970}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f971}', to: '\u{2f971}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f972}', to: '\u{2f972}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f973}', to: '\u{2f973}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f974}', to: '\u{2f974}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 46, byte_len: 3 }) }, + Range { from: '\u{2f975}', to: '\u{2f975}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 46, byte_len: 4 }) }, + Range { from: '\u{2f976}', to: '\u{2f976}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f977}', to: '\u{2f977}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f978}', to: '\u{2f978}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f979}', to: '\u{2f979}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f97a}', to: '\u{2f97a}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f97b}', to: '\u{2f97b}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f97c}', to: '\u{2f97c}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f97d}', to: '\u{2f97d}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 47, byte_len: 3 }) }, + 
Range { from: '\u{2f97e}', to: '\u{2f97e}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f97f}', to: '\u{2f97f}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f980}', to: '\u{2f980}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f981}', to: '\u{2f981}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f982}', to: '\u{2f982}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f983}', to: '\u{2f983}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f984}', to: '\u{2f984}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f985}', to: '\u{2f985}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f986}', to: '\u{2f986}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f987}', to: '\u{2f987}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f988}', to: '\u{2f988}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f989}', to: '\u{2f989}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f98a}', to: '\u{2f98a}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f98b}', to: '\u{2f98b}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 44, byte_len: 3 }) }, + Range { from: '\u{2f98c}', to: '\u{2f98c}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f98d}', to: '\u{2f98d}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f98e}', to: '\u{2f98e}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f98f}', to: '\u{2f98f}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f990}', to: '\u{2f990}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f991}', to: '\u{2f991}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f992}', to: '\u{2f992}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f993}', to: '\u{2f993}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f994}', to: '\u{2f994}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f995}', to: '\u{2f995}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f996}', to: '\u{2f996}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f997}', to: '\u{2f997}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 47, byte_len: 
4 }) }, + Range { from: '\u{2f998}', to: '\u{2f998}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 27, byte_len: 3 }) }, + Range { from: '\u{2f999}', to: '\u{2f999}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f99a}', to: '\u{2f99a}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f99b}', to: '\u{2f99b}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f99c}', to: '\u{2f99c}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f99d}', to: '\u{2f99d}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f99e}', to: '\u{2f99e}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f99f}', to: '\u{2f99f}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 29, byte_len: 3 }) }, + Range { from: '\u{2f9a0}', to: '\u{2f9a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9a1}', to: '\u{2f9a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9a2}', to: '\u{2f9a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9a3}', to: '\u{2f9a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9a4}', to: '\u{2f9a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f9a5}', to: '\u{2f9a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f9a6}', to: '\u{2f9a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f9a7}', to: '\u{2f9a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9a8}', to: '\u{2f9a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9a9}', to: '\u{2f9a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9aa}', to: '\u{2f9aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9ab}', to: '\u{2f9ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f9ac}', to: '\u{2f9ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9ad}', to: '\u{2f9ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f9ae}', to: '\u{2f9ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9af}', to: '\u{2f9af}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9b0}', to: '\u{2f9b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f9b1}', to: '\u{2f9b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 
184, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f9b2}', to: '\u{2f9b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9b3}', to: '\u{2f9b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9b4}', to: '\u{2f9b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 26, byte_len: 3 }) }, + Range { from: '\u{2f9b5}', to: '\u{2f9b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9b6}', to: '\u{2f9b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9b7}', to: '\u{2f9b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9b8}', to: '\u{2f9b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9b9}', to: '\u{2f9b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9ba}', to: '\u{2f9ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9bb}', to: '\u{2f9bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f9bc}', to: '\u{2f9bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9bd}', to: '\u{2f9bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9be}', to: '\u{2f9be}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9bf}', to: '\u{2f9bf}', mapping: Disallowed }, + Range { from: '\u{2f9c0}', to: '\u{2f9c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9c1}', to: '\u{2f9c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9c2}', to: '\u{2f9c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9c3}', to: '\u{2f9c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9c4}', to: '\u{2f9c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f9c5}', to: '\u{2f9c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 47, byte_len: 4 }) }, + Range { from: '\u{2f9c6}', to: '\u{2f9c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9c7}', to: '\u{2f9c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9c8}', to: '\u{2f9c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9c9}', to: '\u{2f9c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9ca}', to: '\u{2f9ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 47, byte_len: 3 }) }, + Range { from: '\u{2f9cb}', to: '\u{2f9cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 47, byte_len: 4 
}) }, + Range { from: '\u{2f9cc}', to: '\u{2f9cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9cd}', to: '\u{2f9cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9ce}', to: '\u{2f9ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9cf}', to: '\u{2f9cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9d0}', to: '\u{2f9d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f9d1}', to: '\u{2f9d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f9d2}', to: '\u{2f9d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2f9d3}', to: '\u{2f9d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9d4}', to: '\u{2f9d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9d5}', to: '\u{2f9d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9d6}', to: '\u{2f9d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9d7}', to: '\u{2f9d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9d8}', to: '\u{2f9d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9d9}', to: '\u{2f9d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9da}', to: '\u{2f9da}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9db}', to: '\u{2f9db}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9dc}', to: '\u{2f9dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9dd}', to: '\u{2f9dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9de}', to: '\u{2f9de}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9df}', to: '\u{2f9df}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2f9e0}', to: '\u{2f9e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9e1}', to: '\u{2f9e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9e2}', to: '\u{2f9e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9e3}', to: '\u{2f9e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9e4}', to: '\u{2f9e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9e5}', to: '\u{2f9e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 48, 
byte_len: 4 }) }, + Range { from: '\u{2f9e6}', to: '\u{2f9e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9e7}', to: '\u{2f9e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9e8}', to: '\u{2f9e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9e9}', to: '\u{2f9e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9ea}', to: '\u{2f9ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9eb}', to: '\u{2f9eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9ec}', to: '\u{2f9ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9ed}', to: '\u{2f9ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9ee}', to: '\u{2f9ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9ef}', to: '\u{2f9ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9f0}', to: '\u{2f9f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9f1}', to: '\u{2f9f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9f2}', to: '\u{2f9f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9f3}', to: '\u{2f9f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9f4}', to: '\u{2f9f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9f5}', to: '\u{2f9f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9f6}', to: '\u{2f9f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9f7}', to: '\u{2f9f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9f8}', to: '\u{2f9f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9f9}', to: '\u{2f9f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9fa}', to: '\u{2f9fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9fb}', to: '\u{2f9fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9fc}', to: '\u{2f9fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2f9fd}', to: '\u{2f9fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2f9fe}', to: '\u{2f9ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2fa00}', to: '\u{2fa00}', mapping: Mapped(StringTableSlice { byte_start_lo: 
152, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa01}', to: '\u{2fa01}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2fa02}', to: '\u{2fa02}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa03}', to: '\u{2fa03}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa04}', to: '\u{2fa04}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa05}', to: '\u{2fa05}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa06}', to: '\u{2fa06}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa07}', to: '\u{2fa07}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa08}', to: '\u{2fa08}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa09}', to: '\u{2fa09}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2fa0a}', to: '\u{2fa0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 30, byte_len: 3 }) }, + Range { from: '\u{2fa0b}', to: '\u{2fa0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa0c}', to: '\u{2fa0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa0d}', to: '\u{2fa0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa0e}', to: '\u{2fa0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa0f}', to: '\u{2fa0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa10}', to: '\u{2fa10}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2fa11}', to: '\u{2fa11}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa12}', to: '\u{2fa12}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2fa13}', to: '\u{2fa13}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2fa14}', to: '\u{2fa14}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2fa15}', to: '\u{2fa15}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa16}', to: '\u{2fa16}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa17}', to: '\u{2fa17}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa18}', to: '\u{2fa18}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa19}', to: '\u{2fa19}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa1a}', to: '\u{2fa1a}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa1b}', to: '\u{2fa1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 48, byte_len: 3 }) }, + Range { from: '\u{2fa1c}', to: '\u{2fa1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 13, byte_len: 3 }) }, + Range { from: '\u{2fa1d}', to: '\u{2fa1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 48, byte_len: 4 }) }, + Range { from: '\u{2fa1e}', to: '\u{e00ff}', mapping: Disallowed }, + Range { from: '\u{e0100}', to: '\u{e01ef}', mapping: Ignored }, + Range { from: '\u{e01f0}', to: '\u{10ffff}', mapping: Disallowed }, +]; + +static STRING_TABLE: &'static str = "\u{61}\ + \u{62}\ + \u{63}\ + \u{64}\ + \u{65}\ + \u{66}\ + \u{67}\ + \u{68}\ + \u{69}\ + \u{6a}\ + \u{6b}\ + \u{6c}\ + \u{6d}\ + \u{6e}\ + \u{6f}\ + \u{70}\ + \u{71}\ + \u{72}\ + \u{73}\ + \u{74}\ + \u{75}\ + \u{76}\ + \u{77}\ + \u{78}\ + \u{79}\ + \u{7a}\ + \u{20}\ + \u{20}\ + \u{308}\ + \u{20}\ + \u{304}\ + \u{32}\ + \u{33}\ + \u{20}\ + \u{301}\ + \u{3bc}\ + \u{20}\ + \u{327}\ + \u{31}\ + \u{31}\ + \u{2044}\ + \u{34}\ + \u{31}\ + \u{2044}\ + \u{32}\ + \u{33}\ + \u{2044}\ + \u{34}\ + \u{e0}\ + \u{e1}\ + \u{e2}\ + \u{e3}\ + \u{e4}\ + \u{e5}\ + \u{e6}\ + \u{e7}\ + \u{e8}\ + \u{e9}\ + \u{ea}\ + \u{eb}\ + \u{ec}\ + \u{ed}\ + \u{ee}\ + \u{ef}\ + \u{f0}\ + \u{f1}\ + \u{f2}\ + \u{f3}\ + \u{f4}\ + \u{f5}\ + \u{f6}\ + \u{f8}\ + \u{f9}\ + \u{fa}\ + \u{fb}\ + \u{fc}\ + \u{fd}\ + \u{fe}\ + \u{73}\ + \u{73}\ + \u{101}\ + \u{103}\ + \u{105}\ + \u{107}\ + \u{109}\ + \u{10b}\ + \u{10d}\ + \u{10f}\ + \u{111}\ + \u{113}\ + \u{115}\ + \u{117}\ + \u{119}\ + \u{11b}\ + \u{11d}\ + \u{11f}\ + \u{121}\ + \u{123}\ + \u{125}\ + \u{127}\ + \u{129}\ + \u{12b}\ + \u{12d}\ + \u{12f}\ + \u{69}\ + \u{307}\ + \u{69}\ + \u{6a}\ + \u{135}\ + \u{137}\ + \u{13a}\ + \u{13c}\ + \u{13e}\ + \u{6c}\ + \u{b7}\ + \u{142}\ + \u{144}\ + \u{146}\ + \u{148}\ + \u{2bc}\ + \u{6e}\ + \u{14b}\ + \u{14d}\ + \u{14f}\ + \u{151}\ + \u{153}\ + \u{155}\ + \u{157}\ + \u{159}\ + \u{15b}\ + \u{15d}\ + \u{15f}\ + \u{161}\ + \u{163}\ + \u{165}\ + \u{167}\ + \u{169}\ + \u{16b}\ + \u{16d}\ + \u{16f}\ + \u{171}\ + \u{173}\ + \u{175}\ + \u{177}\ + \u{ff}\ + \u{17a}\ + \u{17c}\ + \u{17e}\ + \u{253}\ + \u{183}\ + \u{185}\ + \u{254}\ + \u{188}\ + \u{256}\ + \u{257}\ + \u{18c}\ + \u{1dd}\ + \u{259}\ + \u{25b}\ + \u{192}\ + \u{260}\ + \u{263}\ + \u{269}\ + \u{268}\ + \u{199}\ + \u{26f}\ + \u{272}\ + \u{275}\ + \u{1a1}\ + \u{1a3}\ + \u{1a5}\ + \u{280}\ + \u{1a8}\ + \u{283}\ + \u{1ad}\ + \u{288}\ + \u{1b0}\ + \u{28a}\ + \u{28b}\ + \u{1b4}\ + \u{1b6}\ + \u{292}\ + \u{1b9}\ + \u{1bd}\ + \u{64}\ + \u{17e}\ + \u{6c}\ + \u{6a}\ + \u{6e}\ + \u{6a}\ + \u{1ce}\ + \u{1d0}\ + \u{1d2}\ + \u{1d4}\ + \u{1d6}\ + \u{1d8}\ + \u{1da}\ + \u{1dc}\ + \u{1df}\ + \u{1e1}\ + \u{1e3}\ + \u{1e5}\ + \u{1e7}\ + \u{1e9}\ + \u{1eb}\ + \u{1ed}\ + \u{1ef}\ + \u{64}\ + \u{7a}\ + \u{1f5}\ + \u{195}\ + \u{1bf}\ + \u{1f9}\ + \u{1fb}\ + \u{1fd}\ + \u{1ff}\ + \u{201}\ + \u{203}\ + \u{205}\ + \u{207}\ + \u{209}\ + \u{20b}\ + \u{20d}\ + \u{20f}\ + \u{211}\ + \u{213}\ + \u{215}\ + \u{217}\ + \u{219}\ + \u{21b}\ + \u{21d}\ + \u{21f}\ + \u{19e}\ + \u{223}\ + \u{225}\ + \u{227}\ + \u{229}\ + \u{22b}\ + \u{22d}\ + \u{22f}\ + \u{231}\ + \u{233}\ + \u{2c65}\ + \u{23c}\ + \u{19a}\ + \u{2c66}\ + \u{242}\ + \u{180}\ + \u{289}\ + \u{28c}\ + \u{247}\ + \u{249}\ + \u{24b}\ + \u{24d}\ + \u{24f}\ + \u{266}\ + \u{279}\ + \u{27b}\ + \u{281}\ + \u{20}\ + \u{306}\ + \u{20}\ + \u{307}\ 
+ \u{20}\ + \u{30a}\ + \u{20}\ + \u{328}\ + \u{20}\ + \u{303}\ + \u{20}\ + \u{30b}\ + \u{295}\ + \u{300}\ + \u{301}\ + \u{313}\ + \u{308}\ + \u{301}\ + \u{3b9}\ + \u{371}\ + \u{373}\ + \u{2b9}\ + \u{377}\ + \u{20}\ + \u{3b9}\ + \u{3b}\ + \u{3f3}\ + \u{20}\ + \u{308}\ + \u{301}\ + \u{3ac}\ + \u{b7}\ + \u{3ad}\ + \u{3ae}\ + \u{3af}\ + \u{3cc}\ + \u{3cd}\ + \u{3ce}\ + \u{3b1}\ + \u{3b2}\ + \u{3b3}\ + \u{3b4}\ + \u{3b5}\ + \u{3b6}\ + \u{3b7}\ + \u{3b8}\ + \u{3ba}\ + \u{3bb}\ + \u{3bd}\ + \u{3be}\ + \u{3bf}\ + \u{3c0}\ + \u{3c1}\ + \u{3c3}\ + \u{3c4}\ + \u{3c5}\ + \u{3c6}\ + \u{3c7}\ + \u{3c8}\ + \u{3c9}\ + \u{3ca}\ + \u{3cb}\ + \u{3d7}\ + \u{3d9}\ + \u{3db}\ + \u{3dd}\ + \u{3df}\ + \u{3e1}\ + \u{3e3}\ + \u{3e5}\ + \u{3e7}\ + \u{3e9}\ + \u{3eb}\ + \u{3ed}\ + \u{3ef}\ + \u{3f8}\ + \u{3fb}\ + \u{37b}\ + \u{37c}\ + \u{37d}\ + \u{450}\ + \u{451}\ + \u{452}\ + \u{453}\ + \u{454}\ + \u{455}\ + \u{456}\ + \u{457}\ + \u{458}\ + \u{459}\ + \u{45a}\ + \u{45b}\ + \u{45c}\ + \u{45d}\ + \u{45e}\ + \u{45f}\ + \u{430}\ + \u{431}\ + \u{432}\ + \u{433}\ + \u{434}\ + \u{435}\ + \u{436}\ + \u{437}\ + \u{438}\ + \u{439}\ + \u{43a}\ + \u{43b}\ + \u{43c}\ + \u{43d}\ + \u{43e}\ + \u{43f}\ + \u{440}\ + \u{441}\ + \u{442}\ + \u{443}\ + \u{444}\ + \u{445}\ + \u{446}\ + \u{447}\ + \u{448}\ + \u{449}\ + \u{44a}\ + \u{44b}\ + \u{44c}\ + \u{44d}\ + \u{44e}\ + \u{44f}\ + \u{461}\ + \u{463}\ + \u{465}\ + \u{467}\ + \u{469}\ + \u{46b}\ + \u{46d}\ + \u{46f}\ + \u{471}\ + \u{473}\ + \u{475}\ + \u{477}\ + \u{479}\ + \u{47b}\ + \u{47d}\ + \u{47f}\ + \u{481}\ + \u{48b}\ + \u{48d}\ + \u{48f}\ + \u{491}\ + \u{493}\ + \u{495}\ + \u{497}\ + \u{499}\ + \u{49b}\ + \u{49d}\ + \u{49f}\ + \u{4a1}\ + \u{4a3}\ + \u{4a5}\ + \u{4a7}\ + \u{4a9}\ + \u{4ab}\ + \u{4ad}\ + \u{4af}\ + \u{4b1}\ + \u{4b3}\ + \u{4b5}\ + \u{4b7}\ + \u{4b9}\ + \u{4bb}\ + \u{4bd}\ + \u{4bf}\ + \u{4c2}\ + \u{4c4}\ + \u{4c6}\ + \u{4c8}\ + \u{4ca}\ + \u{4cc}\ + \u{4ce}\ + \u{4d1}\ + \u{4d3}\ + \u{4d5}\ + \u{4d7}\ + \u{4d9}\ + \u{4db}\ + \u{4dd}\ + \u{4df}\ + \u{4e1}\ + \u{4e3}\ + \u{4e5}\ + \u{4e7}\ + \u{4e9}\ + \u{4eb}\ + \u{4ed}\ + \u{4ef}\ + \u{4f1}\ + \u{4f3}\ + \u{4f5}\ + \u{4f7}\ + \u{4f9}\ + \u{4fb}\ + \u{4fd}\ + \u{4ff}\ + \u{501}\ + \u{503}\ + \u{505}\ + \u{507}\ + \u{509}\ + \u{50b}\ + \u{50d}\ + \u{50f}\ + \u{511}\ + \u{513}\ + \u{515}\ + \u{517}\ + \u{519}\ + \u{51b}\ + \u{51d}\ + \u{51f}\ + \u{521}\ + \u{523}\ + \u{525}\ + \u{527}\ + \u{529}\ + \u{52b}\ + \u{52d}\ + \u{52f}\ + \u{561}\ + \u{562}\ + \u{563}\ + \u{564}\ + \u{565}\ + \u{566}\ + \u{567}\ + \u{568}\ + \u{569}\ + \u{56a}\ + \u{56b}\ + \u{56c}\ + \u{56d}\ + \u{56e}\ + \u{56f}\ + \u{570}\ + \u{571}\ + \u{572}\ + \u{573}\ + \u{574}\ + \u{575}\ + \u{576}\ + \u{577}\ + \u{578}\ + \u{579}\ + \u{57a}\ + \u{57b}\ + \u{57c}\ + \u{57d}\ + \u{57e}\ + \u{57f}\ + \u{580}\ + \u{581}\ + \u{582}\ + \u{583}\ + \u{584}\ + \u{585}\ + \u{586}\ + \u{565}\ + \u{582}\ + \u{627}\ + \u{674}\ + \u{648}\ + \u{674}\ + \u{6c7}\ + \u{674}\ + \u{64a}\ + \u{674}\ + \u{915}\ + \u{93c}\ + \u{916}\ + \u{93c}\ + \u{917}\ + \u{93c}\ + \u{91c}\ + \u{93c}\ + \u{921}\ + \u{93c}\ + \u{922}\ + \u{93c}\ + \u{92b}\ + \u{93c}\ + \u{92f}\ + \u{93c}\ + \u{9a1}\ + \u{9bc}\ + \u{9a2}\ + \u{9bc}\ + \u{9af}\ + \u{9bc}\ + \u{a32}\ + \u{a3c}\ + \u{a38}\ + \u{a3c}\ + \u{a16}\ + \u{a3c}\ + \u{a17}\ + \u{a3c}\ + \u{a1c}\ + \u{a3c}\ + \u{a2b}\ + \u{a3c}\ + \u{b21}\ + \u{b3c}\ + \u{b22}\ + \u{b3c}\ + \u{e4d}\ + \u{e32}\ + \u{ecd}\ + \u{eb2}\ + \u{eab}\ + \u{e99}\ + \u{eab}\ + \u{ea1}\ + \u{f0b}\ + \u{f42}\ + \u{fb7}\ + \u{f4c}\ + \u{fb7}\ + \u{f51}\ + 
\u{fb7}\ + \u{f56}\ + \u{fb7}\ + \u{f5b}\ + \u{fb7}\ + \u{f40}\ + \u{fb5}\ + \u{f71}\ + \u{f72}\ + \u{f71}\ + \u{f74}\ + \u{fb2}\ + \u{f80}\ + \u{fb2}\ + \u{f71}\ + \u{f80}\ + \u{fb3}\ + \u{f80}\ + \u{fb3}\ + \u{f71}\ + \u{f80}\ + \u{f71}\ + \u{f80}\ + \u{f92}\ + \u{fb7}\ + \u{f9c}\ + \u{fb7}\ + \u{fa1}\ + \u{fb7}\ + \u{fa6}\ + \u{fb7}\ + \u{fab}\ + \u{fb7}\ + \u{f90}\ + \u{fb5}\ + \u{2d27}\ + \u{2d2d}\ + \u{10dc}\ + \u{13f0}\ + \u{13f1}\ + \u{13f2}\ + \u{13f3}\ + \u{13f4}\ + \u{13f5}\ + \u{a64b}\ + \u{250}\ + \u{251}\ + \u{1d02}\ + \u{25c}\ + \u{1d16}\ + \u{1d17}\ + \u{1d1d}\ + \u{1d25}\ + \u{252}\ + \u{255}\ + \u{25f}\ + \u{261}\ + \u{265}\ + \u{26a}\ + \u{1d7b}\ + \u{29d}\ + \u{26d}\ + \u{1d85}\ + \u{29f}\ + \u{271}\ + \u{270}\ + \u{273}\ + \u{274}\ + \u{278}\ + \u{282}\ + \u{1ab}\ + \u{1d1c}\ + \u{290}\ + \u{291}\ + \u{1e01}\ + \u{1e03}\ + \u{1e05}\ + \u{1e07}\ + \u{1e09}\ + \u{1e0b}\ + \u{1e0d}\ + \u{1e0f}\ + \u{1e11}\ + \u{1e13}\ + \u{1e15}\ + \u{1e17}\ + \u{1e19}\ + \u{1e1b}\ + \u{1e1d}\ + \u{1e1f}\ + \u{1e21}\ + \u{1e23}\ + \u{1e25}\ + \u{1e27}\ + \u{1e29}\ + \u{1e2b}\ + \u{1e2d}\ + \u{1e2f}\ + \u{1e31}\ + \u{1e33}\ + \u{1e35}\ + \u{1e37}\ + \u{1e39}\ + \u{1e3b}\ + \u{1e3d}\ + \u{1e3f}\ + \u{1e41}\ + \u{1e43}\ + \u{1e45}\ + \u{1e47}\ + \u{1e49}\ + \u{1e4b}\ + \u{1e4d}\ + \u{1e4f}\ + \u{1e51}\ + \u{1e53}\ + \u{1e55}\ + \u{1e57}\ + \u{1e59}\ + \u{1e5b}\ + \u{1e5d}\ + \u{1e5f}\ + \u{1e61}\ + \u{1e63}\ + \u{1e65}\ + \u{1e67}\ + \u{1e69}\ + \u{1e6b}\ + \u{1e6d}\ + \u{1e6f}\ + \u{1e71}\ + \u{1e73}\ + \u{1e75}\ + \u{1e77}\ + \u{1e79}\ + \u{1e7b}\ + \u{1e7d}\ + \u{1e7f}\ + \u{1e81}\ + \u{1e83}\ + \u{1e85}\ + \u{1e87}\ + \u{1e89}\ + \u{1e8b}\ + \u{1e8d}\ + \u{1e8f}\ + \u{1e91}\ + \u{1e93}\ + \u{1e95}\ + \u{61}\ + \u{2be}\ + \u{1ea1}\ + \u{1ea3}\ + \u{1ea5}\ + \u{1ea7}\ + \u{1ea9}\ + \u{1eab}\ + \u{1ead}\ + \u{1eaf}\ + \u{1eb1}\ + \u{1eb3}\ + \u{1eb5}\ + \u{1eb7}\ + \u{1eb9}\ + \u{1ebb}\ + \u{1ebd}\ + \u{1ebf}\ + \u{1ec1}\ + \u{1ec3}\ + \u{1ec5}\ + \u{1ec7}\ + \u{1ec9}\ + \u{1ecb}\ + \u{1ecd}\ + \u{1ecf}\ + \u{1ed1}\ + \u{1ed3}\ + \u{1ed5}\ + \u{1ed7}\ + \u{1ed9}\ + \u{1edb}\ + \u{1edd}\ + \u{1edf}\ + \u{1ee1}\ + \u{1ee3}\ + \u{1ee5}\ + \u{1ee7}\ + \u{1ee9}\ + \u{1eeb}\ + \u{1eed}\ + \u{1eef}\ + \u{1ef1}\ + \u{1ef3}\ + \u{1ef5}\ + \u{1ef7}\ + \u{1ef9}\ + \u{1efb}\ + \u{1efd}\ + \u{1eff}\ + \u{1f00}\ + \u{1f01}\ + \u{1f02}\ + \u{1f03}\ + \u{1f04}\ + \u{1f05}\ + \u{1f06}\ + \u{1f07}\ + \u{1f10}\ + \u{1f11}\ + \u{1f12}\ + \u{1f13}\ + \u{1f14}\ + \u{1f15}\ + \u{1f20}\ + \u{1f21}\ + \u{1f22}\ + \u{1f23}\ + \u{1f24}\ + \u{1f25}\ + \u{1f26}\ + \u{1f27}\ + \u{1f30}\ + \u{1f31}\ + \u{1f32}\ + \u{1f33}\ + \u{1f34}\ + \u{1f35}\ + \u{1f36}\ + \u{1f37}\ + \u{1f40}\ + \u{1f41}\ + \u{1f42}\ + \u{1f43}\ + \u{1f44}\ + \u{1f45}\ + \u{1f51}\ + \u{1f53}\ + \u{1f55}\ + \u{1f57}\ + \u{1f60}\ + \u{1f61}\ + \u{1f62}\ + \u{1f63}\ + \u{1f64}\ + \u{1f65}\ + \u{1f66}\ + \u{1f67}\ + \u{1f00}\ + \u{3b9}\ + \u{1f01}\ + \u{3b9}\ + \u{1f02}\ + \u{3b9}\ + \u{1f03}\ + \u{3b9}\ + \u{1f04}\ + \u{3b9}\ + \u{1f05}\ + \u{3b9}\ + \u{1f06}\ + \u{3b9}\ + \u{1f07}\ + \u{3b9}\ + \u{1f20}\ + \u{3b9}\ + \u{1f21}\ + \u{3b9}\ + \u{1f22}\ + \u{3b9}\ + \u{1f23}\ + \u{3b9}\ + \u{1f24}\ + \u{3b9}\ + \u{1f25}\ + \u{3b9}\ + \u{1f26}\ + \u{3b9}\ + \u{1f27}\ + \u{3b9}\ + \u{1f60}\ + \u{3b9}\ + \u{1f61}\ + \u{3b9}\ + \u{1f62}\ + \u{3b9}\ + \u{1f63}\ + \u{3b9}\ + \u{1f64}\ + \u{3b9}\ + \u{1f65}\ + \u{3b9}\ + \u{1f66}\ + \u{3b9}\ + \u{1f67}\ + \u{3b9}\ + \u{1f70}\ + \u{3b9}\ + \u{3b1}\ + \u{3b9}\ + \u{3ac}\ + \u{3b9}\ + \u{1fb6}\ + \u{3b9}\ + 
\u{1fb0}\ + \u{1fb1}\ + \u{1f70}\ + \u{20}\ + \u{313}\ + \u{20}\ + \u{342}\ + \u{20}\ + \u{308}\ + \u{342}\ + \u{1f74}\ + \u{3b9}\ + \u{3b7}\ + \u{3b9}\ + \u{3ae}\ + \u{3b9}\ + \u{1fc6}\ + \u{3b9}\ + \u{1f72}\ + \u{1f74}\ + \u{20}\ + \u{313}\ + \u{300}\ + \u{20}\ + \u{313}\ + \u{301}\ + \u{20}\ + \u{313}\ + \u{342}\ + \u{390}\ + \u{1fd0}\ + \u{1fd1}\ + \u{1f76}\ + \u{20}\ + \u{314}\ + \u{300}\ + \u{20}\ + \u{314}\ + \u{301}\ + \u{20}\ + \u{314}\ + \u{342}\ + \u{3b0}\ + \u{1fe0}\ + \u{1fe1}\ + \u{1f7a}\ + \u{1fe5}\ + \u{20}\ + \u{308}\ + \u{300}\ + \u{60}\ + \u{1f7c}\ + \u{3b9}\ + \u{3c9}\ + \u{3b9}\ + \u{3ce}\ + \u{3b9}\ + \u{1ff6}\ + \u{3b9}\ + \u{1f78}\ + \u{1f7c}\ + \u{20}\ + \u{314}\ + \u{2010}\ + \u{20}\ + \u{333}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{2035}\ + \u{2035}\ + \u{2035}\ + \u{2035}\ + \u{2035}\ + \u{21}\ + \u{21}\ + \u{20}\ + \u{305}\ + \u{3f}\ + \u{3f}\ + \u{3f}\ + \u{21}\ + \u{21}\ + \u{3f}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{30}\ + \u{34}\ + \u{35}\ + \u{36}\ + \u{37}\ + \u{38}\ + \u{39}\ + \u{2b}\ + \u{2212}\ + \u{3d}\ + \u{28}\ + \u{29}\ + \u{72}\ + \u{73}\ + \u{61}\ + \u{2f}\ + \u{63}\ + \u{61}\ + \u{2f}\ + \u{73}\ + \u{b0}\ + \u{63}\ + \u{63}\ + \u{2f}\ + \u{6f}\ + \u{63}\ + \u{2f}\ + \u{75}\ + \u{b0}\ + \u{66}\ + \u{6e}\ + \u{6f}\ + \u{73}\ + \u{6d}\ + \u{74}\ + \u{65}\ + \u{6c}\ + \u{74}\ + \u{6d}\ + \u{5d0}\ + \u{5d1}\ + \u{5d2}\ + \u{5d3}\ + \u{66}\ + \u{61}\ + \u{78}\ + \u{2211}\ + \u{31}\ + \u{2044}\ + \u{37}\ + \u{31}\ + \u{2044}\ + \u{39}\ + \u{31}\ + \u{2044}\ + \u{31}\ + \u{30}\ + \u{31}\ + \u{2044}\ + \u{33}\ + \u{32}\ + \u{2044}\ + \u{33}\ + \u{31}\ + \u{2044}\ + \u{35}\ + \u{32}\ + \u{2044}\ + \u{35}\ + \u{33}\ + \u{2044}\ + \u{35}\ + \u{34}\ + \u{2044}\ + \u{35}\ + \u{31}\ + \u{2044}\ + \u{36}\ + \u{35}\ + \u{2044}\ + \u{36}\ + \u{31}\ + \u{2044}\ + \u{38}\ + \u{33}\ + \u{2044}\ + \u{38}\ + \u{35}\ + \u{2044}\ + \u{38}\ + \u{37}\ + \u{2044}\ + \u{38}\ + \u{31}\ + \u{2044}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{76}\ + \u{76}\ + \u{69}\ + \u{76}\ + \u{69}\ + \u{69}\ + \u{76}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{78}\ + \u{78}\ + \u{69}\ + \u{78}\ + \u{69}\ + \u{69}\ + \u{30}\ + \u{2044}\ + \u{33}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{222e}\ + \u{222e}\ + \u{222e}\ + \u{222e}\ + \u{222e}\ + \u{3008}\ + \u{3009}\ + \u{31}\ + \u{30}\ + \u{31}\ + \u{31}\ + \u{31}\ + \u{32}\ + \u{31}\ + \u{33}\ + \u{31}\ + \u{34}\ + \u{31}\ + \u{35}\ + \u{31}\ + \u{36}\ + \u{31}\ + \u{37}\ + \u{31}\ + \u{38}\ + \u{31}\ + \u{39}\ + \u{32}\ + \u{30}\ + \u{28}\ + \u{31}\ + \u{29}\ + \u{28}\ + \u{32}\ + \u{29}\ + \u{28}\ + \u{33}\ + \u{29}\ + \u{28}\ + \u{34}\ + \u{29}\ + \u{28}\ + \u{35}\ + \u{29}\ + \u{28}\ + \u{36}\ + \u{29}\ + \u{28}\ + \u{37}\ + \u{29}\ + \u{28}\ + \u{38}\ + \u{29}\ + \u{28}\ + \u{39}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{30}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{31}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{32}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{33}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{34}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{35}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{36}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{37}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{38}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{39}\ + \u{29}\ + \u{28}\ + \u{32}\ + \u{30}\ + \u{29}\ + \u{28}\ + \u{61}\ + \u{29}\ + \u{28}\ + \u{62}\ + \u{29}\ + \u{28}\ + \u{63}\ + \u{29}\ + \u{28}\ + \u{64}\ + \u{29}\ + \u{28}\ + \u{65}\ + \u{29}\ + \u{28}\ + \u{66}\ + \u{29}\ + \u{28}\ + \u{67}\ + \u{29}\ + \u{28}\ + \u{68}\ 
+ \u{29}\ + \u{28}\ + \u{69}\ + \u{29}\ + \u{28}\ + \u{6a}\ + \u{29}\ + \u{28}\ + \u{6b}\ + \u{29}\ + \u{28}\ + \u{6c}\ + \u{29}\ + \u{28}\ + \u{6d}\ + \u{29}\ + \u{28}\ + \u{6e}\ + \u{29}\ + \u{28}\ + \u{6f}\ + \u{29}\ + \u{28}\ + \u{70}\ + \u{29}\ + \u{28}\ + \u{71}\ + \u{29}\ + \u{28}\ + \u{72}\ + \u{29}\ + \u{28}\ + \u{73}\ + \u{29}\ + \u{28}\ + \u{74}\ + \u{29}\ + \u{28}\ + \u{75}\ + \u{29}\ + \u{28}\ + \u{76}\ + \u{29}\ + \u{28}\ + \u{77}\ + \u{29}\ + \u{28}\ + \u{78}\ + \u{29}\ + \u{28}\ + \u{79}\ + \u{29}\ + \u{28}\ + \u{7a}\ + \u{29}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{3a}\ + \u{3a}\ + \u{3d}\ + \u{3d}\ + \u{3d}\ + \u{3d}\ + \u{3d}\ + \u{3d}\ + \u{2add}\ + \u{338}\ + \u{2c30}\ + \u{2c31}\ + \u{2c32}\ + \u{2c33}\ + \u{2c34}\ + \u{2c35}\ + \u{2c36}\ + \u{2c37}\ + \u{2c38}\ + \u{2c39}\ + \u{2c3a}\ + \u{2c3b}\ + \u{2c3c}\ + \u{2c3d}\ + \u{2c3e}\ + \u{2c3f}\ + \u{2c40}\ + \u{2c41}\ + \u{2c42}\ + \u{2c43}\ + \u{2c44}\ + \u{2c45}\ + \u{2c46}\ + \u{2c47}\ + \u{2c48}\ + \u{2c49}\ + \u{2c4a}\ + \u{2c4b}\ + \u{2c4c}\ + \u{2c4d}\ + \u{2c4e}\ + \u{2c4f}\ + \u{2c50}\ + \u{2c51}\ + \u{2c52}\ + \u{2c53}\ + \u{2c54}\ + \u{2c55}\ + \u{2c56}\ + \u{2c57}\ + \u{2c58}\ + \u{2c59}\ + \u{2c5a}\ + \u{2c5b}\ + \u{2c5c}\ + \u{2c5d}\ + \u{2c5e}\ + \u{2c61}\ + \u{26b}\ + \u{1d7d}\ + \u{27d}\ + \u{2c68}\ + \u{2c6a}\ + \u{2c6c}\ + \u{2c73}\ + \u{2c76}\ + \u{23f}\ + \u{240}\ + \u{2c81}\ + \u{2c83}\ + \u{2c85}\ + \u{2c87}\ + \u{2c89}\ + \u{2c8b}\ + \u{2c8d}\ + \u{2c8f}\ + \u{2c91}\ + \u{2c93}\ + \u{2c95}\ + \u{2c97}\ + \u{2c99}\ + \u{2c9b}\ + \u{2c9d}\ + \u{2c9f}\ + \u{2ca1}\ + \u{2ca3}\ + \u{2ca5}\ + \u{2ca7}\ + \u{2ca9}\ + \u{2cab}\ + \u{2cad}\ + \u{2caf}\ + \u{2cb1}\ + \u{2cb3}\ + \u{2cb5}\ + \u{2cb7}\ + \u{2cb9}\ + \u{2cbb}\ + \u{2cbd}\ + \u{2cbf}\ + \u{2cc1}\ + \u{2cc3}\ + \u{2cc5}\ + \u{2cc7}\ + \u{2cc9}\ + \u{2ccb}\ + \u{2ccd}\ + \u{2ccf}\ + \u{2cd1}\ + \u{2cd3}\ + \u{2cd5}\ + \u{2cd7}\ + \u{2cd9}\ + \u{2cdb}\ + \u{2cdd}\ + \u{2cdf}\ + \u{2ce1}\ + \u{2ce3}\ + \u{2cec}\ + \u{2cee}\ + \u{2cf3}\ + \u{2d61}\ + \u{6bcd}\ + \u{9f9f}\ + \u{4e00}\ + \u{4e28}\ + \u{4e36}\ + \u{4e3f}\ + \u{4e59}\ + \u{4e85}\ + \u{4e8c}\ + \u{4ea0}\ + \u{4eba}\ + \u{513f}\ + \u{5165}\ + \u{516b}\ + \u{5182}\ + \u{5196}\ + \u{51ab}\ + \u{51e0}\ + \u{51f5}\ + \u{5200}\ + \u{529b}\ + \u{52f9}\ + \u{5315}\ + \u{531a}\ + \u{5338}\ + \u{5341}\ + \u{535c}\ + \u{5369}\ + \u{5382}\ + \u{53b6}\ + \u{53c8}\ + \u{53e3}\ + \u{56d7}\ + \u{571f}\ + \u{58eb}\ + \u{5902}\ + \u{590a}\ + \u{5915}\ + \u{5927}\ + \u{5973}\ + \u{5b50}\ + \u{5b80}\ + \u{5bf8}\ + \u{5c0f}\ + \u{5c22}\ + \u{5c38}\ + \u{5c6e}\ + \u{5c71}\ + \u{5ddb}\ + \u{5de5}\ + \u{5df1}\ + \u{5dfe}\ + \u{5e72}\ + \u{5e7a}\ + \u{5e7f}\ + \u{5ef4}\ + \u{5efe}\ + \u{5f0b}\ + \u{5f13}\ + \u{5f50}\ + \u{5f61}\ + \u{5f73}\ + \u{5fc3}\ + \u{6208}\ + \u{6236}\ + \u{624b}\ + \u{652f}\ + \u{6534}\ + \u{6587}\ + \u{6597}\ + \u{65a4}\ + \u{65b9}\ + \u{65e0}\ + \u{65e5}\ + \u{66f0}\ + \u{6708}\ + \u{6728}\ + \u{6b20}\ + \u{6b62}\ + \u{6b79}\ + \u{6bb3}\ + \u{6bcb}\ + \u{6bd4}\ + \u{6bdb}\ + \u{6c0f}\ + \u{6c14}\ + \u{6c34}\ + \u{706b}\ + \u{722a}\ + \u{7236}\ + \u{723b}\ + \u{723f}\ + \u{7247}\ + \u{7259}\ + \u{725b}\ + \u{72ac}\ + \u{7384}\ + \u{7389}\ + \u{74dc}\ + \u{74e6}\ + \u{7518}\ + \u{751f}\ + \u{7528}\ + \u{7530}\ + \u{758b}\ + \u{7592}\ + \u{7676}\ + \u{767d}\ + \u{76ae}\ + \u{76bf}\ + \u{76ee}\ + \u{77db}\ + \u{77e2}\ + \u{77f3}\ + \u{793a}\ + \u{79b8}\ + \u{79be}\ + \u{7a74}\ + \u{7acb}\ + \u{7af9}\ + \u{7c73}\ + \u{7cf8}\ + \u{7f36}\ + \u{7f51}\ + \u{7f8a}\ + \u{7fbd}\ + 
\u{8001}\ + \u{800c}\ + \u{8012}\ + \u{8033}\ + \u{807f}\ + \u{8089}\ + \u{81e3}\ + \u{81ea}\ + \u{81f3}\ + \u{81fc}\ + \u{820c}\ + \u{821b}\ + \u{821f}\ + \u{826e}\ + \u{8272}\ + \u{8278}\ + \u{864d}\ + \u{866b}\ + \u{8840}\ + \u{884c}\ + \u{8863}\ + \u{897e}\ + \u{898b}\ + \u{89d2}\ + \u{8a00}\ + \u{8c37}\ + \u{8c46}\ + \u{8c55}\ + \u{8c78}\ + \u{8c9d}\ + \u{8d64}\ + \u{8d70}\ + \u{8db3}\ + \u{8eab}\ + \u{8eca}\ + \u{8f9b}\ + \u{8fb0}\ + \u{8fb5}\ + \u{9091}\ + \u{9149}\ + \u{91c6}\ + \u{91cc}\ + \u{91d1}\ + \u{9577}\ + \u{9580}\ + \u{961c}\ + \u{96b6}\ + \u{96b9}\ + \u{96e8}\ + \u{9751}\ + \u{975e}\ + \u{9762}\ + \u{9769}\ + \u{97cb}\ + \u{97ed}\ + \u{97f3}\ + \u{9801}\ + \u{98a8}\ + \u{98db}\ + \u{98df}\ + \u{9996}\ + \u{9999}\ + \u{99ac}\ + \u{9aa8}\ + \u{9ad8}\ + \u{9adf}\ + \u{9b25}\ + \u{9b2f}\ + \u{9b32}\ + \u{9b3c}\ + \u{9b5a}\ + \u{9ce5}\ + \u{9e75}\ + \u{9e7f}\ + \u{9ea5}\ + \u{9ebb}\ + \u{9ec3}\ + \u{9ecd}\ + \u{9ed1}\ + \u{9ef9}\ + \u{9efd}\ + \u{9f0e}\ + \u{9f13}\ + \u{9f20}\ + \u{9f3b}\ + \u{9f4a}\ + \u{9f52}\ + \u{9f8d}\ + \u{9f9c}\ + \u{9fa0}\ + \u{2e}\ + \u{3012}\ + \u{5344}\ + \u{5345}\ + \u{20}\ + \u{3099}\ + \u{20}\ + \u{309a}\ + \u{3088}\ + \u{308a}\ + \u{30b3}\ + \u{30c8}\ + \u{1100}\ + \u{1101}\ + \u{11aa}\ + \u{1102}\ + \u{11ac}\ + \u{11ad}\ + \u{1103}\ + \u{1104}\ + \u{1105}\ + \u{11b0}\ + \u{11b1}\ + \u{11b2}\ + \u{11b3}\ + \u{11b4}\ + \u{11b5}\ + \u{111a}\ + \u{1106}\ + \u{1107}\ + \u{1108}\ + \u{1121}\ + \u{1109}\ + \u{110a}\ + \u{110b}\ + \u{110c}\ + \u{110d}\ + \u{110e}\ + \u{110f}\ + \u{1110}\ + \u{1111}\ + \u{1112}\ + \u{1161}\ + \u{1162}\ + \u{1163}\ + \u{1164}\ + \u{1165}\ + \u{1166}\ + \u{1167}\ + \u{1168}\ + \u{1169}\ + \u{116a}\ + \u{116b}\ + \u{116c}\ + \u{116d}\ + \u{116e}\ + \u{116f}\ + \u{1170}\ + \u{1171}\ + \u{1172}\ + \u{1173}\ + \u{1174}\ + \u{1175}\ + \u{1114}\ + \u{1115}\ + \u{11c7}\ + \u{11c8}\ + \u{11cc}\ + \u{11ce}\ + \u{11d3}\ + \u{11d7}\ + \u{11d9}\ + \u{111c}\ + \u{11dd}\ + \u{11df}\ + \u{111d}\ + \u{111e}\ + \u{1120}\ + \u{1122}\ + \u{1123}\ + \u{1127}\ + \u{1129}\ + \u{112b}\ + \u{112c}\ + \u{112d}\ + \u{112e}\ + \u{112f}\ + \u{1132}\ + \u{1136}\ + \u{1140}\ + \u{1147}\ + \u{114c}\ + \u{11f1}\ + \u{11f2}\ + \u{1157}\ + \u{1158}\ + \u{1159}\ + \u{1184}\ + \u{1185}\ + \u{1188}\ + \u{1191}\ + \u{1192}\ + \u{1194}\ + \u{119e}\ + \u{11a1}\ + \u{4e09}\ + \u{56db}\ + \u{4e0a}\ + \u{4e2d}\ + \u{4e0b}\ + \u{7532}\ + \u{4e19}\ + \u{4e01}\ + \u{5929}\ + \u{5730}\ + \u{28}\ + \u{1100}\ + \u{29}\ + \u{28}\ + \u{1102}\ + \u{29}\ + \u{28}\ + \u{1103}\ + \u{29}\ + \u{28}\ + \u{1105}\ + \u{29}\ + \u{28}\ + \u{1106}\ + \u{29}\ + \u{28}\ + \u{1107}\ + \u{29}\ + \u{28}\ + \u{1109}\ + \u{29}\ + \u{28}\ + \u{110b}\ + \u{29}\ + \u{28}\ + \u{110c}\ + \u{29}\ + \u{28}\ + \u{110e}\ + \u{29}\ + \u{28}\ + \u{110f}\ + \u{29}\ + \u{28}\ + \u{1110}\ + \u{29}\ + \u{28}\ + \u{1111}\ + \u{29}\ + \u{28}\ + \u{1112}\ + \u{29}\ + \u{28}\ + \u{ac00}\ + \u{29}\ + \u{28}\ + \u{b098}\ + \u{29}\ + \u{28}\ + \u{b2e4}\ + \u{29}\ + \u{28}\ + \u{b77c}\ + \u{29}\ + \u{28}\ + \u{b9c8}\ + \u{29}\ + \u{28}\ + \u{bc14}\ + \u{29}\ + \u{28}\ + \u{c0ac}\ + \u{29}\ + \u{28}\ + \u{c544}\ + \u{29}\ + \u{28}\ + \u{c790}\ + \u{29}\ + \u{28}\ + \u{cc28}\ + \u{29}\ + \u{28}\ + \u{ce74}\ + \u{29}\ + \u{28}\ + \u{d0c0}\ + \u{29}\ + \u{28}\ + \u{d30c}\ + \u{29}\ + \u{28}\ + \u{d558}\ + \u{29}\ + \u{28}\ + \u{c8fc}\ + \u{29}\ + \u{28}\ + \u{c624}\ + \u{c804}\ + \u{29}\ + \u{28}\ + \u{c624}\ + \u{d6c4}\ + \u{29}\ + \u{28}\ + \u{4e00}\ + \u{29}\ + \u{28}\ + \u{4e8c}\ + \u{29}\ + \u{28}\ + \u{4e09}\ 
+ \u{29}\ + \u{28}\ + \u{56db}\ + \u{29}\ + \u{28}\ + \u{4e94}\ + \u{29}\ + \u{28}\ + \u{516d}\ + \u{29}\ + \u{28}\ + \u{4e03}\ + \u{29}\ + \u{28}\ + \u{516b}\ + \u{29}\ + \u{28}\ + \u{4e5d}\ + \u{29}\ + \u{28}\ + \u{5341}\ + \u{29}\ + \u{28}\ + \u{6708}\ + \u{29}\ + \u{28}\ + \u{706b}\ + \u{29}\ + \u{28}\ + \u{6c34}\ + \u{29}\ + \u{28}\ + \u{6728}\ + \u{29}\ + \u{28}\ + \u{91d1}\ + \u{29}\ + \u{28}\ + \u{571f}\ + \u{29}\ + \u{28}\ + \u{65e5}\ + \u{29}\ + \u{28}\ + \u{682a}\ + \u{29}\ + \u{28}\ + \u{6709}\ + \u{29}\ + \u{28}\ + \u{793e}\ + \u{29}\ + \u{28}\ + \u{540d}\ + \u{29}\ + \u{28}\ + \u{7279}\ + \u{29}\ + \u{28}\ + \u{8ca1}\ + \u{29}\ + \u{28}\ + \u{795d}\ + \u{29}\ + \u{28}\ + \u{52b4}\ + \u{29}\ + \u{28}\ + \u{4ee3}\ + \u{29}\ + \u{28}\ + \u{547c}\ + \u{29}\ + \u{28}\ + \u{5b66}\ + \u{29}\ + \u{28}\ + \u{76e3}\ + \u{29}\ + \u{28}\ + \u{4f01}\ + \u{29}\ + \u{28}\ + \u{8cc7}\ + \u{29}\ + \u{28}\ + \u{5354}\ + \u{29}\ + \u{28}\ + \u{796d}\ + \u{29}\ + \u{28}\ + \u{4f11}\ + \u{29}\ + \u{28}\ + \u{81ea}\ + \u{29}\ + \u{28}\ + \u{81f3}\ + \u{29}\ + \u{554f}\ + \u{5e7c}\ + \u{7b8f}\ + \u{70}\ + \u{74}\ + \u{65}\ + \u{32}\ + \u{31}\ + \u{32}\ + \u{32}\ + \u{32}\ + \u{33}\ + \u{32}\ + \u{34}\ + \u{32}\ + \u{35}\ + \u{32}\ + \u{36}\ + \u{32}\ + \u{37}\ + \u{32}\ + \u{38}\ + \u{32}\ + \u{39}\ + \u{33}\ + \u{30}\ + \u{33}\ + \u{31}\ + \u{33}\ + \u{32}\ + \u{33}\ + \u{33}\ + \u{33}\ + \u{34}\ + \u{33}\ + \u{35}\ + \u{ac00}\ + \u{b098}\ + \u{b2e4}\ + \u{b77c}\ + \u{b9c8}\ + \u{bc14}\ + \u{c0ac}\ + \u{c544}\ + \u{c790}\ + \u{cc28}\ + \u{ce74}\ + \u{d0c0}\ + \u{d30c}\ + \u{d558}\ + \u{cc38}\ + \u{ace0}\ + \u{c8fc}\ + \u{c758}\ + \u{c6b0}\ + \u{4e94}\ + \u{516d}\ + \u{4e03}\ + \u{4e5d}\ + \u{682a}\ + \u{6709}\ + \u{793e}\ + \u{540d}\ + \u{7279}\ + \u{8ca1}\ + \u{795d}\ + \u{52b4}\ + \u{79d8}\ + \u{7537}\ + \u{9069}\ + \u{512a}\ + \u{5370}\ + \u{6ce8}\ + \u{9805}\ + \u{4f11}\ + \u{5199}\ + \u{6b63}\ + \u{5de6}\ + \u{53f3}\ + \u{533b}\ + \u{5b97}\ + \u{5b66}\ + \u{76e3}\ + \u{4f01}\ + \u{8cc7}\ + \u{5354}\ + \u{591c}\ + \u{33}\ + \u{36}\ + \u{33}\ + \u{37}\ + \u{33}\ + \u{38}\ + \u{33}\ + \u{39}\ + \u{34}\ + \u{30}\ + \u{34}\ + \u{31}\ + \u{34}\ + \u{32}\ + \u{34}\ + \u{33}\ + \u{34}\ + \u{34}\ + \u{34}\ + \u{35}\ + \u{34}\ + \u{36}\ + \u{34}\ + \u{37}\ + \u{34}\ + \u{38}\ + \u{34}\ + \u{39}\ + \u{35}\ + \u{30}\ + \u{31}\ + \u{6708}\ + \u{32}\ + \u{6708}\ + \u{33}\ + \u{6708}\ + \u{34}\ + \u{6708}\ + \u{35}\ + \u{6708}\ + \u{36}\ + \u{6708}\ + \u{37}\ + \u{6708}\ + \u{38}\ + \u{6708}\ + \u{39}\ + \u{6708}\ + \u{31}\ + \u{30}\ + \u{6708}\ + \u{31}\ + \u{31}\ + \u{6708}\ + \u{31}\ + \u{32}\ + \u{6708}\ + \u{68}\ + \u{67}\ + \u{65}\ + \u{72}\ + \u{67}\ + \u{65}\ + \u{76}\ + \u{6c}\ + \u{74}\ + \u{64}\ + \u{30a2}\ + \u{30a4}\ + \u{30a6}\ + \u{30a8}\ + \u{30aa}\ + \u{30ab}\ + \u{30ad}\ + \u{30af}\ + \u{30b1}\ + \u{30b3}\ + \u{30b5}\ + \u{30b7}\ + \u{30b9}\ + \u{30bb}\ + \u{30bd}\ + \u{30bf}\ + \u{30c1}\ + \u{30c4}\ + \u{30c6}\ + \u{30c8}\ + \u{30ca}\ + \u{30cb}\ + \u{30cc}\ + \u{30cd}\ + \u{30ce}\ + \u{30cf}\ + \u{30d2}\ + \u{30d5}\ + \u{30d8}\ + \u{30db}\ + \u{30de}\ + \u{30df}\ + \u{30e0}\ + \u{30e1}\ + \u{30e2}\ + \u{30e4}\ + \u{30e6}\ + \u{30e8}\ + \u{30e9}\ + \u{30ea}\ + \u{30eb}\ + \u{30ec}\ + \u{30ed}\ + \u{30ef}\ + \u{30f0}\ + \u{30f1}\ + \u{30f2}\ + \u{30a2}\ + \u{30d1}\ + \u{30fc}\ + \u{30c8}\ + \u{30a2}\ + \u{30eb}\ + \u{30d5}\ + \u{30a1}\ + \u{30a2}\ + \u{30f3}\ + \u{30da}\ + \u{30a2}\ + \u{30a2}\ + \u{30fc}\ + \u{30eb}\ + \u{30a4}\ + \u{30cb}\ + \u{30f3}\ + \u{30b0}\ + \u{30a4}\ + \u{30f3}\ 
+ \u{30c1}\ + \u{30a6}\ + \u{30a9}\ + \u{30f3}\ + \u{30a8}\ + \u{30b9}\ + \u{30af}\ + \u{30fc}\ + \u{30c9}\ + \u{30a8}\ + \u{30fc}\ + \u{30ab}\ + \u{30fc}\ + \u{30aa}\ + \u{30f3}\ + \u{30b9}\ + \u{30aa}\ + \u{30fc}\ + \u{30e0}\ + \u{30ab}\ + \u{30a4}\ + \u{30ea}\ + \u{30ab}\ + \u{30e9}\ + \u{30c3}\ + \u{30c8}\ + \u{30ab}\ + \u{30ed}\ + \u{30ea}\ + \u{30fc}\ + \u{30ac}\ + \u{30ed}\ + \u{30f3}\ + \u{30ac}\ + \u{30f3}\ + \u{30de}\ + \u{30ae}\ + \u{30ac}\ + \u{30ae}\ + \u{30cb}\ + \u{30fc}\ + \u{30ad}\ + \u{30e5}\ + \u{30ea}\ + \u{30fc}\ + \u{30ae}\ + \u{30eb}\ + \u{30c0}\ + \u{30fc}\ + \u{30ad}\ + \u{30ed}\ + \u{30ad}\ + \u{30ed}\ + \u{30b0}\ + \u{30e9}\ + \u{30e0}\ + \u{30ad}\ + \u{30ed}\ + \u{30e1}\ + \u{30fc}\ + \u{30c8}\ + \u{30eb}\ + \u{30ad}\ + \u{30ed}\ + \u{30ef}\ + \u{30c3}\ + \u{30c8}\ + \u{30b0}\ + \u{30e9}\ + \u{30e0}\ + \u{30b0}\ + \u{30e9}\ + \u{30e0}\ + \u{30c8}\ + \u{30f3}\ + \u{30af}\ + \u{30eb}\ + \u{30bc}\ + \u{30a4}\ + \u{30ed}\ + \u{30af}\ + \u{30ed}\ + \u{30fc}\ + \u{30cd}\ + \u{30b1}\ + \u{30fc}\ + \u{30b9}\ + \u{30b3}\ + \u{30eb}\ + \u{30ca}\ + \u{30b3}\ + \u{30fc}\ + \u{30dd}\ + \u{30b5}\ + \u{30a4}\ + \u{30af}\ + \u{30eb}\ + \u{30b5}\ + \u{30f3}\ + \u{30c1}\ + \u{30fc}\ + \u{30e0}\ + \u{30b7}\ + \u{30ea}\ + \u{30f3}\ + \u{30b0}\ + \u{30bb}\ + \u{30f3}\ + \u{30c1}\ + \u{30bb}\ + \u{30f3}\ + \u{30c8}\ + \u{30c0}\ + \u{30fc}\ + \u{30b9}\ + \u{30c7}\ + \u{30b7}\ + \u{30c9}\ + \u{30eb}\ + \u{30c8}\ + \u{30f3}\ + \u{30ca}\ + \u{30ce}\ + \u{30ce}\ + \u{30c3}\ + \u{30c8}\ + \u{30cf}\ + \u{30a4}\ + \u{30c4}\ + \u{30d1}\ + \u{30fc}\ + \u{30bb}\ + \u{30f3}\ + \u{30c8}\ + \u{30d1}\ + \u{30fc}\ + \u{30c4}\ + \u{30d0}\ + \u{30fc}\ + \u{30ec}\ + \u{30eb}\ + \u{30d4}\ + \u{30a2}\ + \u{30b9}\ + \u{30c8}\ + \u{30eb}\ + \u{30d4}\ + \u{30af}\ + \u{30eb}\ + \u{30d4}\ + \u{30b3}\ + \u{30d3}\ + \u{30eb}\ + \u{30d5}\ + \u{30a1}\ + \u{30e9}\ + \u{30c3}\ + \u{30c9}\ + \u{30d5}\ + \u{30a3}\ + \u{30fc}\ + \u{30c8}\ + \u{30d6}\ + \u{30c3}\ + \u{30b7}\ + \u{30a7}\ + \u{30eb}\ + \u{30d5}\ + \u{30e9}\ + \u{30f3}\ + \u{30d8}\ + \u{30af}\ + \u{30bf}\ + \u{30fc}\ + \u{30eb}\ + \u{30da}\ + \u{30bd}\ + \u{30da}\ + \u{30cb}\ + \u{30d2}\ + \u{30d8}\ + \u{30eb}\ + \u{30c4}\ + \u{30da}\ + \u{30f3}\ + \u{30b9}\ + \u{30da}\ + \u{30fc}\ + \u{30b8}\ + \u{30d9}\ + \u{30fc}\ + \u{30bf}\ + \u{30dd}\ + \u{30a4}\ + \u{30f3}\ + \u{30c8}\ + \u{30dc}\ + \u{30eb}\ + \u{30c8}\ + \u{30db}\ + \u{30f3}\ + \u{30dd}\ + \u{30f3}\ + \u{30c9}\ + \u{30db}\ + \u{30fc}\ + \u{30eb}\ + \u{30db}\ + \u{30fc}\ + \u{30f3}\ + \u{30de}\ + \u{30a4}\ + \u{30af}\ + \u{30ed}\ + \u{30de}\ + \u{30a4}\ + \u{30eb}\ + \u{30de}\ + \u{30c3}\ + \u{30cf}\ + \u{30de}\ + \u{30eb}\ + \u{30af}\ + \u{30de}\ + \u{30f3}\ + \u{30b7}\ + \u{30e7}\ + \u{30f3}\ + \u{30df}\ + \u{30af}\ + \u{30ed}\ + \u{30f3}\ + \u{30df}\ + \u{30ea}\ + \u{30df}\ + \u{30ea}\ + \u{30d0}\ + \u{30fc}\ + \u{30eb}\ + \u{30e1}\ + \u{30ac}\ + \u{30e1}\ + \u{30ac}\ + \u{30c8}\ + \u{30f3}\ + \u{30e1}\ + \u{30fc}\ + \u{30c8}\ + \u{30eb}\ + \u{30e4}\ + \u{30fc}\ + \u{30c9}\ + \u{30e4}\ + \u{30fc}\ + \u{30eb}\ + \u{30e6}\ + \u{30a2}\ + \u{30f3}\ + \u{30ea}\ + \u{30c3}\ + \u{30c8}\ + \u{30eb}\ + \u{30ea}\ + \u{30e9}\ + \u{30eb}\ + \u{30d4}\ + \u{30fc}\ + \u{30eb}\ + \u{30fc}\ + \u{30d6}\ + \u{30eb}\ + \u{30ec}\ + \u{30e0}\ + \u{30ec}\ + \u{30f3}\ + \u{30c8}\ + \u{30b2}\ + \u{30f3}\ + \u{30ef}\ + \u{30c3}\ + \u{30c8}\ + \u{30}\ + \u{70b9}\ + \u{31}\ + \u{70b9}\ + \u{32}\ + \u{70b9}\ + \u{33}\ + \u{70b9}\ + \u{34}\ + \u{70b9}\ + \u{35}\ + \u{70b9}\ + \u{36}\ + \u{70b9}\ + \u{37}\ + \u{70b9}\ + 
\u{38}\ + \u{70b9}\ + \u{39}\ + \u{70b9}\ + \u{31}\ + \u{30}\ + \u{70b9}\ + \u{31}\ + \u{31}\ + \u{70b9}\ + \u{31}\ + \u{32}\ + \u{70b9}\ + \u{31}\ + \u{33}\ + \u{70b9}\ + \u{31}\ + \u{34}\ + \u{70b9}\ + \u{31}\ + \u{35}\ + \u{70b9}\ + \u{31}\ + \u{36}\ + \u{70b9}\ + \u{31}\ + \u{37}\ + \u{70b9}\ + \u{31}\ + \u{38}\ + \u{70b9}\ + \u{31}\ + \u{39}\ + \u{70b9}\ + \u{32}\ + \u{30}\ + \u{70b9}\ + \u{32}\ + \u{31}\ + \u{70b9}\ + \u{32}\ + \u{32}\ + \u{70b9}\ + \u{32}\ + \u{33}\ + \u{70b9}\ + \u{32}\ + \u{34}\ + \u{70b9}\ + \u{68}\ + \u{70}\ + \u{61}\ + \u{64}\ + \u{61}\ + \u{61}\ + \u{75}\ + \u{62}\ + \u{61}\ + \u{72}\ + \u{6f}\ + \u{76}\ + \u{70}\ + \u{63}\ + \u{64}\ + \u{6d}\ + \u{64}\ + \u{6d}\ + \u{32}\ + \u{64}\ + \u{6d}\ + \u{33}\ + \u{69}\ + \u{75}\ + \u{5e73}\ + \u{6210}\ + \u{662d}\ + \u{548c}\ + \u{5927}\ + \u{6b63}\ + \u{660e}\ + \u{6cbb}\ + \u{682a}\ + \u{5f0f}\ + \u{4f1a}\ + \u{793e}\ + \u{70}\ + \u{61}\ + \u{6e}\ + \u{61}\ + \u{3bc}\ + \u{61}\ + \u{6d}\ + \u{61}\ + \u{6b}\ + \u{61}\ + \u{6b}\ + \u{62}\ + \u{6d}\ + \u{62}\ + \u{67}\ + \u{62}\ + \u{63}\ + \u{61}\ + \u{6c}\ + \u{6b}\ + \u{63}\ + \u{61}\ + \u{6c}\ + \u{70}\ + \u{66}\ + \u{6e}\ + \u{66}\ + \u{3bc}\ + \u{66}\ + \u{3bc}\ + \u{67}\ + \u{6d}\ + \u{67}\ + \u{6b}\ + \u{67}\ + \u{68}\ + \u{7a}\ + \u{6b}\ + \u{68}\ + \u{7a}\ + \u{6d}\ + \u{68}\ + \u{7a}\ + \u{67}\ + \u{68}\ + \u{7a}\ + \u{74}\ + \u{68}\ + \u{7a}\ + \u{3bc}\ + \u{6c}\ + \u{6d}\ + \u{6c}\ + \u{64}\ + \u{6c}\ + \u{6b}\ + \u{6c}\ + \u{66}\ + \u{6d}\ + \u{6e}\ + \u{6d}\ + \u{3bc}\ + \u{6d}\ + \u{6d}\ + \u{6d}\ + \u{63}\ + \u{6d}\ + \u{6b}\ + \u{6d}\ + \u{6d}\ + \u{6d}\ + \u{32}\ + \u{63}\ + \u{6d}\ + \u{32}\ + \u{6d}\ + \u{32}\ + \u{6b}\ + \u{6d}\ + \u{32}\ + \u{6d}\ + \u{6d}\ + \u{33}\ + \u{63}\ + \u{6d}\ + \u{33}\ + \u{6d}\ + \u{33}\ + \u{6b}\ + \u{6d}\ + \u{33}\ + \u{6d}\ + \u{2215}\ + \u{73}\ + \u{6d}\ + \u{2215}\ + \u{73}\ + \u{32}\ + \u{6b}\ + \u{70}\ + \u{61}\ + \u{6d}\ + \u{70}\ + \u{61}\ + \u{67}\ + \u{70}\ + \u{61}\ + \u{72}\ + \u{61}\ + \u{64}\ + \u{72}\ + \u{61}\ + \u{64}\ + \u{2215}\ + \u{73}\ + \u{72}\ + \u{61}\ + \u{64}\ + \u{2215}\ + \u{73}\ + \u{32}\ + \u{70}\ + \u{73}\ + \u{6e}\ + \u{73}\ + \u{3bc}\ + \u{73}\ + \u{6d}\ + \u{73}\ + \u{70}\ + \u{76}\ + \u{6e}\ + \u{76}\ + \u{3bc}\ + \u{76}\ + \u{6d}\ + \u{76}\ + \u{6b}\ + \u{76}\ + \u{70}\ + \u{77}\ + \u{6e}\ + \u{77}\ + \u{3bc}\ + \u{77}\ + \u{6d}\ + \u{77}\ + \u{6b}\ + \u{77}\ + \u{6b}\ + \u{3c9}\ + \u{6d}\ + \u{3c9}\ + \u{62}\ + \u{71}\ + \u{63}\ + \u{63}\ + \u{63}\ + \u{64}\ + \u{63}\ + \u{2215}\ + \u{6b}\ + \u{67}\ + \u{64}\ + \u{62}\ + \u{67}\ + \u{79}\ + \u{68}\ + \u{61}\ + \u{68}\ + \u{70}\ + \u{69}\ + \u{6e}\ + \u{6b}\ + \u{6b}\ + \u{6b}\ + \u{74}\ + \u{6c}\ + \u{6d}\ + \u{6c}\ + \u{6e}\ + \u{6c}\ + \u{6f}\ + \u{67}\ + \u{6c}\ + \u{78}\ + \u{6d}\ + \u{69}\ + \u{6c}\ + \u{6d}\ + \u{6f}\ + \u{6c}\ + \u{70}\ + \u{68}\ + \u{70}\ + \u{70}\ + \u{6d}\ + \u{70}\ + \u{72}\ + \u{73}\ + \u{72}\ + \u{73}\ + \u{76}\ + \u{77}\ + \u{62}\ + \u{76}\ + \u{2215}\ + \u{6d}\ + \u{61}\ + \u{2215}\ + \u{6d}\ + \u{31}\ + \u{65e5}\ + \u{32}\ + \u{65e5}\ + \u{33}\ + \u{65e5}\ + \u{34}\ + \u{65e5}\ + \u{35}\ + \u{65e5}\ + \u{36}\ + \u{65e5}\ + \u{37}\ + \u{65e5}\ + \u{38}\ + \u{65e5}\ + \u{39}\ + \u{65e5}\ + \u{31}\ + \u{30}\ + \u{65e5}\ + \u{31}\ + \u{31}\ + \u{65e5}\ + \u{31}\ + \u{32}\ + \u{65e5}\ + \u{31}\ + \u{33}\ + \u{65e5}\ + \u{31}\ + \u{34}\ + \u{65e5}\ + \u{31}\ + \u{35}\ + \u{65e5}\ + \u{31}\ + \u{36}\ + \u{65e5}\ + \u{31}\ + \u{37}\ + \u{65e5}\ + \u{31}\ + \u{38}\ + \u{65e5}\ + \u{31}\ + \u{39}\ + 
\u{65e5}\ + \u{32}\ + \u{30}\ + \u{65e5}\ + \u{32}\ + \u{31}\ + \u{65e5}\ + \u{32}\ + \u{32}\ + \u{65e5}\ + \u{32}\ + \u{33}\ + \u{65e5}\ + \u{32}\ + \u{34}\ + \u{65e5}\ + \u{32}\ + \u{35}\ + \u{65e5}\ + \u{32}\ + \u{36}\ + \u{65e5}\ + \u{32}\ + \u{37}\ + \u{65e5}\ + \u{32}\ + \u{38}\ + \u{65e5}\ + \u{32}\ + \u{39}\ + \u{65e5}\ + \u{33}\ + \u{30}\ + \u{65e5}\ + \u{33}\ + \u{31}\ + \u{65e5}\ + \u{67}\ + \u{61}\ + \u{6c}\ + \u{a641}\ + \u{a643}\ + \u{a645}\ + \u{a647}\ + \u{a649}\ + \u{a64d}\ + \u{a64f}\ + \u{a651}\ + \u{a653}\ + \u{a655}\ + \u{a657}\ + \u{a659}\ + \u{a65b}\ + \u{a65d}\ + \u{a65f}\ + \u{a661}\ + \u{a663}\ + \u{a665}\ + \u{a667}\ + \u{a669}\ + \u{a66b}\ + \u{a66d}\ + \u{a681}\ + \u{a683}\ + \u{a685}\ + \u{a687}\ + \u{a689}\ + \u{a68b}\ + \u{a68d}\ + \u{a68f}\ + \u{a691}\ + \u{a693}\ + \u{a695}\ + \u{a697}\ + \u{a699}\ + \u{a69b}\ + \u{a723}\ + \u{a725}\ + \u{a727}\ + \u{a729}\ + \u{a72b}\ + \u{a72d}\ + \u{a72f}\ + \u{a733}\ + \u{a735}\ + \u{a737}\ + \u{a739}\ + \u{a73b}\ + \u{a73d}\ + \u{a73f}\ + \u{a741}\ + \u{a743}\ + \u{a745}\ + \u{a747}\ + \u{a749}\ + \u{a74b}\ + \u{a74d}\ + \u{a74f}\ + \u{a751}\ + \u{a753}\ + \u{a755}\ + \u{a757}\ + \u{a759}\ + \u{a75b}\ + \u{a75d}\ + \u{a75f}\ + \u{a761}\ + \u{a763}\ + \u{a765}\ + \u{a767}\ + \u{a769}\ + \u{a76b}\ + \u{a76d}\ + \u{a76f}\ + \u{a77a}\ + \u{a77c}\ + \u{1d79}\ + \u{a77f}\ + \u{a781}\ + \u{a783}\ + \u{a785}\ + \u{a787}\ + \u{a78c}\ + \u{a791}\ + \u{a793}\ + \u{a797}\ + \u{a799}\ + \u{a79b}\ + \u{a79d}\ + \u{a79f}\ + \u{a7a1}\ + \u{a7a3}\ + \u{a7a5}\ + \u{a7a7}\ + \u{a7a9}\ + \u{26c}\ + \u{29e}\ + \u{287}\ + \u{ab53}\ + \u{a7b5}\ + \u{a7b7}\ + \u{ab37}\ + \u{ab52}\ + \u{13a0}\ + \u{13a1}\ + \u{13a2}\ + \u{13a3}\ + \u{13a4}\ + \u{13a5}\ + \u{13a6}\ + \u{13a7}\ + \u{13a8}\ + \u{13a9}\ + \u{13aa}\ + \u{13ab}\ + \u{13ac}\ + \u{13ad}\ + \u{13ae}\ + \u{13af}\ + \u{13b0}\ + \u{13b1}\ + \u{13b2}\ + \u{13b3}\ + \u{13b4}\ + \u{13b5}\ + \u{13b6}\ + \u{13b7}\ + \u{13b8}\ + \u{13b9}\ + \u{13ba}\ + \u{13bb}\ + \u{13bc}\ + \u{13bd}\ + \u{13be}\ + \u{13bf}\ + \u{13c0}\ + \u{13c1}\ + \u{13c2}\ + \u{13c3}\ + \u{13c4}\ + \u{13c5}\ + \u{13c6}\ + \u{13c7}\ + \u{13c8}\ + \u{13c9}\ + \u{13ca}\ + \u{13cb}\ + \u{13cc}\ + \u{13cd}\ + \u{13ce}\ + \u{13cf}\ + \u{13d0}\ + \u{13d1}\ + \u{13d2}\ + \u{13d3}\ + \u{13d4}\ + \u{13d5}\ + \u{13d6}\ + \u{13d7}\ + \u{13d8}\ + \u{13d9}\ + \u{13da}\ + \u{13db}\ + \u{13dc}\ + \u{13dd}\ + \u{13de}\ + \u{13df}\ + \u{13e0}\ + \u{13e1}\ + \u{13e2}\ + \u{13e3}\ + \u{13e4}\ + \u{13e5}\ + \u{13e6}\ + \u{13e7}\ + \u{13e8}\ + \u{13e9}\ + \u{13ea}\ + \u{13eb}\ + \u{13ec}\ + \u{13ed}\ + \u{13ee}\ + \u{13ef}\ + \u{8c48}\ + \u{66f4}\ + \u{8cc8}\ + \u{6ed1}\ + \u{4e32}\ + \u{53e5}\ + \u{5951}\ + \u{5587}\ + \u{5948}\ + \u{61f6}\ + \u{7669}\ + \u{7f85}\ + \u{863f}\ + \u{87ba}\ + \u{88f8}\ + \u{908f}\ + \u{6a02}\ + \u{6d1b}\ + \u{70d9}\ + \u{73de}\ + \u{843d}\ + \u{916a}\ + \u{99f1}\ + \u{4e82}\ + \u{5375}\ + \u{6b04}\ + \u{721b}\ + \u{862d}\ + \u{9e1e}\ + \u{5d50}\ + \u{6feb}\ + \u{85cd}\ + \u{8964}\ + \u{62c9}\ + \u{81d8}\ + \u{881f}\ + \u{5eca}\ + \u{6717}\ + \u{6d6a}\ + \u{72fc}\ + \u{90ce}\ + \u{4f86}\ + \u{51b7}\ + \u{52de}\ + \u{64c4}\ + \u{6ad3}\ + \u{7210}\ + \u{76e7}\ + \u{8606}\ + \u{865c}\ + \u{8def}\ + \u{9732}\ + \u{9b6f}\ + \u{9dfa}\ + \u{788c}\ + \u{797f}\ + \u{7da0}\ + \u{83c9}\ + \u{9304}\ + \u{8ad6}\ + \u{58df}\ + \u{5f04}\ + \u{7c60}\ + \u{807e}\ + \u{7262}\ + \u{78ca}\ + \u{8cc2}\ + \u{96f7}\ + \u{58d8}\ + \u{5c62}\ + \u{6a13}\ + \u{6dda}\ + \u{6f0f}\ + \u{7d2f}\ + \u{7e37}\ + \u{964b}\ + \u{52d2}\ + \u{808b}\ + 
\u{51dc}\ + \u{51cc}\ + \u{7a1c}\ + \u{7dbe}\ + \u{83f1}\ + \u{9675}\ + \u{8b80}\ + \u{62cf}\ + \u{8afe}\ + \u{4e39}\ + \u{5be7}\ + \u{6012}\ + \u{7387}\ + \u{7570}\ + \u{5317}\ + \u{78fb}\ + \u{4fbf}\ + \u{5fa9}\ + \u{4e0d}\ + \u{6ccc}\ + \u{6578}\ + \u{7d22}\ + \u{53c3}\ + \u{585e}\ + \u{7701}\ + \u{8449}\ + \u{8aaa}\ + \u{6bba}\ + \u{6c88}\ + \u{62fe}\ + \u{82e5}\ + \u{63a0}\ + \u{7565}\ + \u{4eae}\ + \u{5169}\ + \u{51c9}\ + \u{6881}\ + \u{7ce7}\ + \u{826f}\ + \u{8ad2}\ + \u{91cf}\ + \u{52f5}\ + \u{5442}\ + \u{5eec}\ + \u{65c5}\ + \u{6ffe}\ + \u{792a}\ + \u{95ad}\ + \u{9a6a}\ + \u{9e97}\ + \u{9ece}\ + \u{66c6}\ + \u{6b77}\ + \u{8f62}\ + \u{5e74}\ + \u{6190}\ + \u{6200}\ + \u{649a}\ + \u{6f23}\ + \u{7149}\ + \u{7489}\ + \u{79ca}\ + \u{7df4}\ + \u{806f}\ + \u{8f26}\ + \u{84ee}\ + \u{9023}\ + \u{934a}\ + \u{5217}\ + \u{52a3}\ + \u{54bd}\ + \u{70c8}\ + \u{88c2}\ + \u{5ec9}\ + \u{5ff5}\ + \u{637b}\ + \u{6bae}\ + \u{7c3e}\ + \u{7375}\ + \u{4ee4}\ + \u{56f9}\ + \u{5dba}\ + \u{601c}\ + \u{73b2}\ + \u{7469}\ + \u{7f9a}\ + \u{8046}\ + \u{9234}\ + \u{96f6}\ + \u{9748}\ + \u{9818}\ + \u{4f8b}\ + \u{79ae}\ + \u{91b4}\ + \u{96b8}\ + \u{60e1}\ + \u{4e86}\ + \u{50da}\ + \u{5bee}\ + \u{5c3f}\ + \u{6599}\ + \u{71ce}\ + \u{7642}\ + \u{84fc}\ + \u{907c}\ + \u{6688}\ + \u{962e}\ + \u{5289}\ + \u{677b}\ + \u{67f3}\ + \u{6d41}\ + \u{6e9c}\ + \u{7409}\ + \u{7559}\ + \u{786b}\ + \u{7d10}\ + \u{985e}\ + \u{622e}\ + \u{9678}\ + \u{502b}\ + \u{5d19}\ + \u{6dea}\ + \u{8f2a}\ + \u{5f8b}\ + \u{6144}\ + \u{6817}\ + \u{9686}\ + \u{5229}\ + \u{540f}\ + \u{5c65}\ + \u{6613}\ + \u{674e}\ + \u{68a8}\ + \u{6ce5}\ + \u{7406}\ + \u{75e2}\ + \u{7f79}\ + \u{88cf}\ + \u{88e1}\ + \u{96e2}\ + \u{533f}\ + \u{6eba}\ + \u{541d}\ + \u{71d0}\ + \u{7498}\ + \u{85fa}\ + \u{96a3}\ + \u{9c57}\ + \u{9e9f}\ + \u{6797}\ + \u{6dcb}\ + \u{81e8}\ + \u{7b20}\ + \u{7c92}\ + \u{72c0}\ + \u{7099}\ + \u{8b58}\ + \u{4ec0}\ + \u{8336}\ + \u{523a}\ + \u{5207}\ + \u{5ea6}\ + \u{62d3}\ + \u{7cd6}\ + \u{5b85}\ + \u{6d1e}\ + \u{66b4}\ + \u{8f3b}\ + \u{964d}\ + \u{5ed3}\ + \u{5140}\ + \u{55c0}\ + \u{585a}\ + \u{6674}\ + \u{51de}\ + \u{732a}\ + \u{76ca}\ + \u{793c}\ + \u{795e}\ + \u{7965}\ + \u{798f}\ + \u{9756}\ + \u{7cbe}\ + \u{8612}\ + \u{8af8}\ + \u{9038}\ + \u{90fd}\ + \u{98ef}\ + \u{98fc}\ + \u{9928}\ + \u{9db4}\ + \u{90de}\ + \u{96b7}\ + \u{4fae}\ + \u{50e7}\ + \u{514d}\ + \u{52c9}\ + \u{52e4}\ + \u{5351}\ + \u{559d}\ + \u{5606}\ + \u{5668}\ + \u{5840}\ + \u{58a8}\ + \u{5c64}\ + \u{6094}\ + \u{6168}\ + \u{618e}\ + \u{61f2}\ + \u{654f}\ + \u{65e2}\ + \u{6691}\ + \u{6885}\ + \u{6d77}\ + \u{6e1a}\ + \u{6f22}\ + \u{716e}\ + \u{722b}\ + \u{7422}\ + \u{7891}\ + \u{7949}\ + \u{7948}\ + \u{7950}\ + \u{7956}\ + \u{798d}\ + \u{798e}\ + \u{7a40}\ + \u{7a81}\ + \u{7bc0}\ + \u{7e09}\ + \u{7e41}\ + \u{7f72}\ + \u{8005}\ + \u{81ed}\ + \u{8279}\ + \u{8457}\ + \u{8910}\ + \u{8996}\ + \u{8b01}\ + \u{8b39}\ + \u{8cd3}\ + \u{8d08}\ + \u{8fb6}\ + \u{96e3}\ + \u{97ff}\ + \u{983b}\ + \u{6075}\ + \u{242ee}\ + \u{8218}\ + \u{4e26}\ + \u{51b5}\ + \u{5168}\ + \u{4f80}\ + \u{5145}\ + \u{5180}\ + \u{52c7}\ + \u{52fa}\ + \u{5555}\ + \u{5599}\ + \u{55e2}\ + \u{58b3}\ + \u{5944}\ + \u{5954}\ + \u{5a62}\ + \u{5b28}\ + \u{5ed2}\ + \u{5ed9}\ + \u{5f69}\ + \u{5fad}\ + \u{60d8}\ + \u{614e}\ + \u{6108}\ + \u{6160}\ + \u{6234}\ + \u{63c4}\ + \u{641c}\ + \u{6452}\ + \u{6556}\ + \u{671b}\ + \u{6756}\ + \u{6edb}\ + \u{6ecb}\ + \u{701e}\ + \u{77a7}\ + \u{7235}\ + \u{72af}\ + \u{7471}\ + \u{7506}\ + \u{753b}\ + \u{761d}\ + \u{761f}\ + \u{76db}\ + \u{76f4}\ + \u{774a}\ + \u{7740}\ + \u{78cc}\ + 
\u{7ab1}\ + \u{7c7b}\ + \u{7d5b}\ + \u{7f3e}\ + \u{8352}\ + \u{83ef}\ + \u{8779}\ + \u{8941}\ + \u{8986}\ + \u{8abf}\ + \u{8acb}\ + \u{8aed}\ + \u{8b8a}\ + \u{8f38}\ + \u{9072}\ + \u{9199}\ + \u{9276}\ + \u{967c}\ + \u{97db}\ + \u{980b}\ + \u{9b12}\ + \u{2284a}\ + \u{22844}\ + \u{233d5}\ + \u{3b9d}\ + \u{4018}\ + \u{4039}\ + \u{25249}\ + \u{25cd0}\ + \u{27ed3}\ + \u{9f43}\ + \u{9f8e}\ + \u{66}\ + \u{66}\ + \u{66}\ + \u{69}\ + \u{66}\ + \u{6c}\ + \u{66}\ + \u{66}\ + \u{69}\ + \u{66}\ + \u{66}\ + \u{6c}\ + \u{73}\ + \u{74}\ + \u{574}\ + \u{576}\ + \u{574}\ + \u{565}\ + \u{574}\ + \u{56b}\ + \u{57e}\ + \u{576}\ + \u{574}\ + \u{56d}\ + \u{5d9}\ + \u{5b4}\ + \u{5f2}\ + \u{5b7}\ + \u{5e2}\ + \u{5d4}\ + \u{5db}\ + \u{5dc}\ + \u{5dd}\ + \u{5e8}\ + \u{5ea}\ + \u{5e9}\ + \u{5c1}\ + \u{5e9}\ + \u{5c2}\ + \u{5e9}\ + \u{5bc}\ + \u{5c1}\ + \u{5e9}\ + \u{5bc}\ + \u{5c2}\ + \u{5d0}\ + \u{5b7}\ + \u{5d0}\ + \u{5b8}\ + \u{5d0}\ + \u{5bc}\ + \u{5d1}\ + \u{5bc}\ + \u{5d2}\ + \u{5bc}\ + \u{5d3}\ + \u{5bc}\ + \u{5d4}\ + \u{5bc}\ + \u{5d5}\ + \u{5bc}\ + \u{5d6}\ + \u{5bc}\ + \u{5d8}\ + \u{5bc}\ + \u{5d9}\ + \u{5bc}\ + \u{5da}\ + \u{5bc}\ + \u{5db}\ + \u{5bc}\ + \u{5dc}\ + \u{5bc}\ + \u{5de}\ + \u{5bc}\ + \u{5e0}\ + \u{5bc}\ + \u{5e1}\ + \u{5bc}\ + \u{5e3}\ + \u{5bc}\ + \u{5e4}\ + \u{5bc}\ + \u{5e6}\ + \u{5bc}\ + \u{5e7}\ + \u{5bc}\ + \u{5e8}\ + \u{5bc}\ + \u{5e9}\ + \u{5bc}\ + \u{5ea}\ + \u{5bc}\ + \u{5d5}\ + \u{5b9}\ + \u{5d1}\ + \u{5bf}\ + \u{5db}\ + \u{5bf}\ + \u{5e4}\ + \u{5bf}\ + \u{5d0}\ + \u{5dc}\ + \u{671}\ + \u{67b}\ + \u{67e}\ + \u{680}\ + \u{67a}\ + \u{67f}\ + \u{679}\ + \u{6a4}\ + \u{6a6}\ + \u{684}\ + \u{683}\ + \u{686}\ + \u{687}\ + \u{68d}\ + \u{68c}\ + \u{68e}\ + \u{688}\ + \u{698}\ + \u{691}\ + \u{6a9}\ + \u{6af}\ + \u{6b3}\ + \u{6b1}\ + \u{6ba}\ + \u{6bb}\ + \u{6c0}\ + \u{6c1}\ + \u{6be}\ + \u{6d2}\ + \u{6d3}\ + \u{6ad}\ + \u{6c7}\ + \u{6c6}\ + \u{6c8}\ + \u{6cb}\ + \u{6c5}\ + \u{6c9}\ + \u{6d0}\ + \u{649}\ + \u{626}\ + \u{627}\ + \u{626}\ + \u{6d5}\ + \u{626}\ + \u{648}\ + \u{626}\ + \u{6c7}\ + \u{626}\ + \u{6c6}\ + \u{626}\ + \u{6c8}\ + \u{626}\ + \u{6d0}\ + \u{626}\ + \u{649}\ + \u{6cc}\ + \u{626}\ + \u{62c}\ + \u{626}\ + \u{62d}\ + \u{626}\ + \u{645}\ + \u{626}\ + \u{64a}\ + \u{628}\ + \u{62c}\ + \u{628}\ + \u{62d}\ + \u{628}\ + \u{62e}\ + \u{628}\ + \u{645}\ + \u{628}\ + \u{649}\ + \u{628}\ + \u{64a}\ + \u{62a}\ + \u{62c}\ + \u{62a}\ + \u{62d}\ + \u{62a}\ + \u{62e}\ + \u{62a}\ + \u{645}\ + \u{62a}\ + \u{649}\ + \u{62a}\ + \u{64a}\ + \u{62b}\ + \u{62c}\ + \u{62b}\ + \u{645}\ + \u{62b}\ + \u{649}\ + \u{62b}\ + \u{64a}\ + \u{62c}\ + \u{62d}\ + \u{62c}\ + \u{645}\ + \u{62d}\ + \u{62c}\ + \u{62d}\ + \u{645}\ + \u{62e}\ + \u{62c}\ + \u{62e}\ + \u{62d}\ + \u{62e}\ + \u{645}\ + \u{633}\ + \u{62c}\ + \u{633}\ + \u{62d}\ + \u{633}\ + \u{62e}\ + \u{633}\ + \u{645}\ + \u{635}\ + \u{62d}\ + \u{635}\ + \u{645}\ + \u{636}\ + \u{62c}\ + \u{636}\ + \u{62d}\ + \u{636}\ + \u{62e}\ + \u{636}\ + \u{645}\ + \u{637}\ + \u{62d}\ + \u{637}\ + \u{645}\ + \u{638}\ + \u{645}\ + \u{639}\ + \u{62c}\ + \u{639}\ + \u{645}\ + \u{63a}\ + \u{62c}\ + \u{63a}\ + \u{645}\ + \u{641}\ + \u{62c}\ + \u{641}\ + \u{62d}\ + \u{641}\ + \u{62e}\ + \u{641}\ + \u{645}\ + \u{641}\ + \u{649}\ + \u{641}\ + \u{64a}\ + \u{642}\ + \u{62d}\ + \u{642}\ + \u{645}\ + \u{642}\ + \u{649}\ + \u{642}\ + \u{64a}\ + \u{643}\ + \u{627}\ + \u{643}\ + \u{62c}\ + \u{643}\ + \u{62d}\ + \u{643}\ + \u{62e}\ + \u{643}\ + \u{644}\ + \u{643}\ + \u{645}\ + \u{643}\ + \u{649}\ + \u{643}\ + \u{64a}\ + \u{644}\ + \u{62c}\ + \u{644}\ + \u{62d}\ + \u{644}\ + \u{62e}\ + 
\u{644}\ + \u{645}\ + \u{644}\ + \u{649}\ + \u{644}\ + \u{64a}\ + \u{645}\ + \u{62c}\ + \u{645}\ + \u{62d}\ + \u{645}\ + \u{62e}\ + \u{645}\ + \u{645}\ + \u{645}\ + \u{649}\ + \u{645}\ + \u{64a}\ + \u{646}\ + \u{62c}\ + \u{646}\ + \u{62d}\ + \u{646}\ + \u{62e}\ + \u{646}\ + \u{645}\ + \u{646}\ + \u{649}\ + \u{646}\ + \u{64a}\ + \u{647}\ + \u{62c}\ + \u{647}\ + \u{645}\ + \u{647}\ + \u{649}\ + \u{647}\ + \u{64a}\ + \u{64a}\ + \u{62c}\ + \u{64a}\ + \u{62d}\ + \u{64a}\ + \u{62e}\ + \u{64a}\ + \u{645}\ + \u{64a}\ + \u{649}\ + \u{64a}\ + \u{64a}\ + \u{630}\ + \u{670}\ + \u{631}\ + \u{670}\ + \u{649}\ + \u{670}\ + \u{20}\ + \u{64c}\ + \u{651}\ + \u{20}\ + \u{64d}\ + \u{651}\ + \u{20}\ + \u{64e}\ + \u{651}\ + \u{20}\ + \u{64f}\ + \u{651}\ + \u{20}\ + \u{650}\ + \u{651}\ + \u{20}\ + \u{651}\ + \u{670}\ + \u{626}\ + \u{631}\ + \u{626}\ + \u{632}\ + \u{626}\ + \u{646}\ + \u{628}\ + \u{631}\ + \u{628}\ + \u{632}\ + \u{628}\ + \u{646}\ + \u{62a}\ + \u{631}\ + \u{62a}\ + \u{632}\ + \u{62a}\ + \u{646}\ + \u{62b}\ + \u{631}\ + \u{62b}\ + \u{632}\ + \u{62b}\ + \u{646}\ + \u{645}\ + \u{627}\ + \u{646}\ + \u{631}\ + \u{646}\ + \u{632}\ + \u{646}\ + \u{646}\ + \u{64a}\ + \u{631}\ + \u{64a}\ + \u{632}\ + \u{64a}\ + \u{646}\ + \u{626}\ + \u{62e}\ + \u{626}\ + \u{647}\ + \u{628}\ + \u{647}\ + \u{62a}\ + \u{647}\ + \u{635}\ + \u{62e}\ + \u{644}\ + \u{647}\ + \u{646}\ + \u{647}\ + \u{647}\ + \u{670}\ + \u{64a}\ + \u{647}\ + \u{62b}\ + \u{647}\ + \u{633}\ + \u{647}\ + \u{634}\ + \u{645}\ + \u{634}\ + \u{647}\ + \u{640}\ + \u{64e}\ + \u{651}\ + \u{640}\ + \u{64f}\ + \u{651}\ + \u{640}\ + \u{650}\ + \u{651}\ + \u{637}\ + \u{649}\ + \u{637}\ + \u{64a}\ + \u{639}\ + \u{649}\ + \u{639}\ + \u{64a}\ + \u{63a}\ + \u{649}\ + \u{63a}\ + \u{64a}\ + \u{633}\ + \u{649}\ + \u{633}\ + \u{64a}\ + \u{634}\ + \u{649}\ + \u{634}\ + \u{64a}\ + \u{62d}\ + \u{649}\ + \u{62d}\ + \u{64a}\ + \u{62c}\ + \u{649}\ + \u{62c}\ + \u{64a}\ + \u{62e}\ + \u{649}\ + \u{62e}\ + \u{64a}\ + \u{635}\ + \u{649}\ + \u{635}\ + \u{64a}\ + \u{636}\ + \u{649}\ + \u{636}\ + \u{64a}\ + \u{634}\ + \u{62c}\ + \u{634}\ + \u{62d}\ + \u{634}\ + \u{62e}\ + \u{634}\ + \u{631}\ + \u{633}\ + \u{631}\ + \u{635}\ + \u{631}\ + \u{636}\ + \u{631}\ + \u{627}\ + \u{64b}\ + \u{62a}\ + \u{62c}\ + \u{645}\ + \u{62a}\ + \u{62d}\ + \u{62c}\ + \u{62a}\ + \u{62d}\ + \u{645}\ + \u{62a}\ + \u{62e}\ + \u{645}\ + \u{62a}\ + \u{645}\ + \u{62c}\ + \u{62a}\ + \u{645}\ + \u{62d}\ + \u{62a}\ + \u{645}\ + \u{62e}\ + \u{62c}\ + \u{645}\ + \u{62d}\ + \u{62d}\ + \u{645}\ + \u{64a}\ + \u{62d}\ + \u{645}\ + \u{649}\ + \u{633}\ + \u{62d}\ + \u{62c}\ + \u{633}\ + \u{62c}\ + \u{62d}\ + \u{633}\ + \u{62c}\ + \u{649}\ + \u{633}\ + \u{645}\ + \u{62d}\ + \u{633}\ + \u{645}\ + \u{62c}\ + \u{633}\ + \u{645}\ + \u{645}\ + \u{635}\ + \u{62d}\ + \u{62d}\ + \u{635}\ + \u{645}\ + \u{645}\ + \u{634}\ + \u{62d}\ + \u{645}\ + \u{634}\ + \u{62c}\ + \u{64a}\ + \u{634}\ + \u{645}\ + \u{62e}\ + \u{634}\ + \u{645}\ + \u{645}\ + \u{636}\ + \u{62d}\ + \u{649}\ + \u{636}\ + \u{62e}\ + \u{645}\ + \u{637}\ + \u{645}\ + \u{62d}\ + \u{637}\ + \u{645}\ + \u{645}\ + \u{637}\ + \u{645}\ + \u{64a}\ + \u{639}\ + \u{62c}\ + \u{645}\ + \u{639}\ + \u{645}\ + \u{645}\ + \u{639}\ + \u{645}\ + \u{649}\ + \u{63a}\ + \u{645}\ + \u{645}\ + \u{63a}\ + \u{645}\ + \u{64a}\ + \u{63a}\ + \u{645}\ + \u{649}\ + \u{641}\ + \u{62e}\ + \u{645}\ + \u{642}\ + \u{645}\ + \u{62d}\ + \u{642}\ + \u{645}\ + \u{645}\ + \u{644}\ + \u{62d}\ + \u{645}\ + \u{644}\ + \u{62d}\ + \u{64a}\ + \u{644}\ + \u{62d}\ + \u{649}\ + \u{644}\ + \u{62c}\ + \u{62c}\ + 
\u{644}\ + \u{62e}\ + \u{645}\ + \u{644}\ + \u{645}\ + \u{62d}\ + \u{645}\ + \u{62d}\ + \u{62c}\ + \u{645}\ + \u{62d}\ + \u{645}\ + \u{645}\ + \u{62d}\ + \u{64a}\ + \u{645}\ + \u{62c}\ + \u{62d}\ + \u{645}\ + \u{62c}\ + \u{645}\ + \u{645}\ + \u{62e}\ + \u{62c}\ + \u{645}\ + \u{62e}\ + \u{645}\ + \u{645}\ + \u{62c}\ + \u{62e}\ + \u{647}\ + \u{645}\ + \u{62c}\ + \u{647}\ + \u{645}\ + \u{645}\ + \u{646}\ + \u{62d}\ + \u{645}\ + \u{646}\ + \u{62d}\ + \u{649}\ + \u{646}\ + \u{62c}\ + \u{645}\ + \u{646}\ + \u{62c}\ + \u{649}\ + \u{646}\ + \u{645}\ + \u{64a}\ + \u{646}\ + \u{645}\ + \u{649}\ + \u{64a}\ + \u{645}\ + \u{645}\ + \u{628}\ + \u{62e}\ + \u{64a}\ + \u{62a}\ + \u{62c}\ + \u{64a}\ + \u{62a}\ + \u{62c}\ + \u{649}\ + \u{62a}\ + \u{62e}\ + \u{64a}\ + \u{62a}\ + \u{62e}\ + \u{649}\ + \u{62a}\ + \u{645}\ + \u{64a}\ + \u{62a}\ + \u{645}\ + \u{649}\ + \u{62c}\ + \u{645}\ + \u{64a}\ + \u{62c}\ + \u{62d}\ + \u{649}\ + \u{62c}\ + \u{645}\ + \u{649}\ + \u{633}\ + \u{62e}\ + \u{649}\ + \u{635}\ + \u{62d}\ + \u{64a}\ + \u{634}\ + \u{62d}\ + \u{64a}\ + \u{636}\ + \u{62d}\ + \u{64a}\ + \u{644}\ + \u{62c}\ + \u{64a}\ + \u{644}\ + \u{645}\ + \u{64a}\ + \u{64a}\ + \u{62d}\ + \u{64a}\ + \u{64a}\ + \u{62c}\ + \u{64a}\ + \u{64a}\ + \u{645}\ + \u{64a}\ + \u{645}\ + \u{645}\ + \u{64a}\ + \u{642}\ + \u{645}\ + \u{64a}\ + \u{646}\ + \u{62d}\ + \u{64a}\ + \u{639}\ + \u{645}\ + \u{64a}\ + \u{643}\ + \u{645}\ + \u{64a}\ + \u{646}\ + \u{62c}\ + \u{62d}\ + \u{645}\ + \u{62e}\ + \u{64a}\ + \u{644}\ + \u{62c}\ + \u{645}\ + \u{643}\ + \u{645}\ + \u{645}\ + \u{62c}\ + \u{62d}\ + \u{64a}\ + \u{62d}\ + \u{62c}\ + \u{64a}\ + \u{645}\ + \u{62c}\ + \u{64a}\ + \u{641}\ + \u{645}\ + \u{64a}\ + \u{628}\ + \u{62d}\ + \u{64a}\ + \u{633}\ + \u{62e}\ + \u{64a}\ + \u{646}\ + \u{62c}\ + \u{64a}\ + \u{635}\ + \u{644}\ + \u{6d2}\ + \u{642}\ + \u{644}\ + \u{6d2}\ + \u{627}\ + \u{644}\ + \u{644}\ + \u{647}\ + \u{627}\ + \u{643}\ + \u{628}\ + \u{631}\ + \u{645}\ + \u{62d}\ + \u{645}\ + \u{62f}\ + \u{635}\ + \u{644}\ + \u{639}\ + \u{645}\ + \u{631}\ + \u{633}\ + \u{648}\ + \u{644}\ + \u{639}\ + \u{644}\ + \u{64a}\ + \u{647}\ + \u{648}\ + \u{633}\ + \u{644}\ + \u{645}\ + \u{635}\ + \u{644}\ + \u{649}\ + \u{635}\ + \u{644}\ + \u{649}\ + \u{20}\ + \u{627}\ + \u{644}\ + \u{644}\ + \u{647}\ + \u{20}\ + \u{639}\ + \u{644}\ + \u{64a}\ + \u{647}\ + \u{20}\ + \u{648}\ + \u{633}\ + \u{644}\ + \u{645}\ + \u{62c}\ + \u{644}\ + \u{20}\ + \u{62c}\ + \u{644}\ + \u{627}\ + \u{644}\ + \u{647}\ + \u{631}\ + \u{6cc}\ + \u{627}\ + \u{644}\ + \u{2c}\ + \u{3001}\ + \u{3a}\ + \u{21}\ + \u{3f}\ + \u{3016}\ + \u{3017}\ + \u{2014}\ + \u{2013}\ + \u{5f}\ + \u{7b}\ + \u{7d}\ + \u{3014}\ + \u{3015}\ + \u{3010}\ + \u{3011}\ + \u{300a}\ + \u{300b}\ + \u{300c}\ + \u{300d}\ + \u{300e}\ + \u{300f}\ + \u{5b}\ + \u{5d}\ + \u{23}\ + \u{26}\ + \u{2a}\ + \u{2d}\ + \u{3c}\ + \u{3e}\ + \u{5c}\ + \u{24}\ + \u{25}\ + \u{40}\ + \u{20}\ + \u{64b}\ + \u{640}\ + \u{64b}\ + \u{20}\ + \u{64c}\ + \u{20}\ + \u{64d}\ + \u{20}\ + \u{64e}\ + \u{640}\ + \u{64e}\ + \u{20}\ + \u{64f}\ + \u{640}\ + \u{64f}\ + \u{20}\ + \u{650}\ + \u{640}\ + \u{650}\ + \u{20}\ + \u{651}\ + \u{640}\ + \u{651}\ + \u{20}\ + \u{652}\ + \u{640}\ + \u{652}\ + \u{621}\ + \u{622}\ + \u{623}\ + \u{624}\ + \u{625}\ + \u{626}\ + \u{627}\ + \u{628}\ + \u{629}\ + \u{62a}\ + \u{62b}\ + \u{62c}\ + \u{62d}\ + \u{62e}\ + \u{62f}\ + \u{630}\ + \u{631}\ + \u{632}\ + \u{633}\ + \u{634}\ + \u{635}\ + \u{636}\ + \u{637}\ + \u{638}\ + \u{639}\ + \u{63a}\ + \u{641}\ + \u{642}\ + \u{643}\ + \u{644}\ + \u{645}\ + \u{646}\ + \u{647}\ + 
\u{648}\ + \u{64a}\ + \u{644}\ + \u{622}\ + \u{644}\ + \u{623}\ + \u{644}\ + \u{625}\ + \u{644}\ + \u{627}\ + \u{22}\ + \u{27}\ + \u{2f}\ + \u{5e}\ + \u{7c}\ + \u{7e}\ + \u{2985}\ + \u{2986}\ + \u{30fb}\ + \u{30a1}\ + \u{30a3}\ + \u{30a5}\ + \u{30a7}\ + \u{30a9}\ + \u{30e3}\ + \u{30e5}\ + \u{30e7}\ + \u{30c3}\ + \u{30fc}\ + \u{30f3}\ + \u{3099}\ + \u{309a}\ + \u{a2}\ + \u{a3}\ + \u{ac}\ + \u{a6}\ + \u{a5}\ + \u{20a9}\ + \u{2502}\ + \u{2190}\ + \u{2191}\ + \u{2192}\ + \u{2193}\ + \u{25a0}\ + \u{25cb}\ + \u{10428}\ + \u{10429}\ + \u{1042a}\ + \u{1042b}\ + \u{1042c}\ + \u{1042d}\ + \u{1042e}\ + \u{1042f}\ + \u{10430}\ + \u{10431}\ + \u{10432}\ + \u{10433}\ + \u{10434}\ + \u{10435}\ + \u{10436}\ + \u{10437}\ + \u{10438}\ + \u{10439}\ + \u{1043a}\ + \u{1043b}\ + \u{1043c}\ + \u{1043d}\ + \u{1043e}\ + \u{1043f}\ + \u{10440}\ + \u{10441}\ + \u{10442}\ + \u{10443}\ + \u{10444}\ + \u{10445}\ + \u{10446}\ + \u{10447}\ + \u{10448}\ + \u{10449}\ + \u{1044a}\ + \u{1044b}\ + \u{1044c}\ + \u{1044d}\ + \u{1044e}\ + \u{1044f}\ + \u{104d8}\ + \u{104d9}\ + \u{104da}\ + \u{104db}\ + \u{104dc}\ + \u{104dd}\ + \u{104de}\ + \u{104df}\ + \u{104e0}\ + \u{104e1}\ + \u{104e2}\ + \u{104e3}\ + \u{104e4}\ + \u{104e5}\ + \u{104e6}\ + \u{104e7}\ + \u{104e8}\ + \u{104e9}\ + \u{104ea}\ + \u{104eb}\ + \u{104ec}\ + \u{104ed}\ + \u{104ee}\ + \u{104ef}\ + \u{104f0}\ + \u{104f1}\ + \u{104f2}\ + \u{104f3}\ + \u{104f4}\ + \u{104f5}\ + \u{104f6}\ + \u{104f7}\ + \u{104f8}\ + \u{104f9}\ + \u{104fa}\ + \u{104fb}\ + \u{10cc0}\ + \u{10cc1}\ + \u{10cc2}\ + \u{10cc3}\ + \u{10cc4}\ + \u{10cc5}\ + \u{10cc6}\ + \u{10cc7}\ + \u{10cc8}\ + \u{10cc9}\ + \u{10cca}\ + \u{10ccb}\ + \u{10ccc}\ + \u{10ccd}\ + \u{10cce}\ + \u{10ccf}\ + \u{10cd0}\ + \u{10cd1}\ + \u{10cd2}\ + \u{10cd3}\ + \u{10cd4}\ + \u{10cd5}\ + \u{10cd6}\ + \u{10cd7}\ + \u{10cd8}\ + \u{10cd9}\ + \u{10cda}\ + \u{10cdb}\ + \u{10cdc}\ + \u{10cdd}\ + \u{10cde}\ + \u{10cdf}\ + \u{10ce0}\ + \u{10ce1}\ + \u{10ce2}\ + \u{10ce3}\ + \u{10ce4}\ + \u{10ce5}\ + \u{10ce6}\ + \u{10ce7}\ + \u{10ce8}\ + \u{10ce9}\ + \u{10cea}\ + \u{10ceb}\ + \u{10cec}\ + \u{10ced}\ + \u{10cee}\ + \u{10cef}\ + \u{10cf0}\ + \u{10cf1}\ + \u{10cf2}\ + \u{118c0}\ + \u{118c1}\ + \u{118c2}\ + \u{118c3}\ + \u{118c4}\ + \u{118c5}\ + \u{118c6}\ + \u{118c7}\ + \u{118c8}\ + \u{118c9}\ + \u{118ca}\ + \u{118cb}\ + \u{118cc}\ + \u{118cd}\ + \u{118ce}\ + \u{118cf}\ + \u{118d0}\ + \u{118d1}\ + \u{118d2}\ + \u{118d3}\ + \u{118d4}\ + \u{118d5}\ + \u{118d6}\ + \u{118d7}\ + \u{118d8}\ + \u{118d9}\ + \u{118da}\ + \u{118db}\ + \u{118dc}\ + \u{118dd}\ + \u{118de}\ + \u{118df}\ + \u{1d157}\ + \u{1d165}\ + \u{1d158}\ + \u{1d165}\ + \u{1d158}\ + \u{1d165}\ + \u{1d16e}\ + \u{1d158}\ + \u{1d165}\ + \u{1d16f}\ + \u{1d158}\ + \u{1d165}\ + \u{1d170}\ + \u{1d158}\ + \u{1d165}\ + \u{1d171}\ + \u{1d158}\ + \u{1d165}\ + \u{1d172}\ + \u{1d1b9}\ + \u{1d165}\ + \u{1d1ba}\ + \u{1d165}\ + \u{1d1b9}\ + \u{1d165}\ + \u{1d16e}\ + \u{1d1ba}\ + \u{1d165}\ + \u{1d16e}\ + \u{1d1b9}\ + \u{1d165}\ + \u{1d16f}\ + \u{1d1ba}\ + \u{1d165}\ + \u{1d16f}\ + \u{131}\ + \u{237}\ + \u{2207}\ + \u{2202}\ + \u{1e922}\ + \u{1e923}\ + \u{1e924}\ + \u{1e925}\ + \u{1e926}\ + \u{1e927}\ + \u{1e928}\ + \u{1e929}\ + \u{1e92a}\ + \u{1e92b}\ + \u{1e92c}\ + \u{1e92d}\ + \u{1e92e}\ + \u{1e92f}\ + \u{1e930}\ + \u{1e931}\ + \u{1e932}\ + \u{1e933}\ + \u{1e934}\ + \u{1e935}\ + \u{1e936}\ + \u{1e937}\ + \u{1e938}\ + \u{1e939}\ + \u{1e93a}\ + \u{1e93b}\ + \u{1e93c}\ + \u{1e93d}\ + \u{1e93e}\ + \u{1e93f}\ + \u{1e940}\ + \u{1e941}\ + \u{1e942}\ + \u{1e943}\ + \u{66e}\ + \u{6a1}\ + \u{66f}\ + 
\u{30}\ + \u{2c}\ + \u{31}\ + \u{2c}\ + \u{32}\ + \u{2c}\ + \u{33}\ + \u{2c}\ + \u{34}\ + \u{2c}\ + \u{35}\ + \u{2c}\ + \u{36}\ + \u{2c}\ + \u{37}\ + \u{2c}\ + \u{38}\ + \u{2c}\ + \u{39}\ + \u{2c}\ + \u{3014}\ + \u{73}\ + \u{3015}\ + \u{77}\ + \u{7a}\ + \u{68}\ + \u{76}\ + \u{73}\ + \u{64}\ + \u{70}\ + \u{70}\ + \u{76}\ + \u{77}\ + \u{63}\ + \u{6d}\ + \u{63}\ + \u{6d}\ + \u{64}\ + \u{64}\ + \u{6a}\ + \u{307b}\ + \u{304b}\ + \u{30b3}\ + \u{30b3}\ + \u{5b57}\ + \u{53cc}\ + \u{30c7}\ + \u{591a}\ + \u{89e3}\ + \u{4ea4}\ + \u{6620}\ + \u{7121}\ + \u{524d}\ + \u{5f8c}\ + \u{518d}\ + \u{65b0}\ + \u{521d}\ + \u{7d42}\ + \u{8ca9}\ + \u{58f0}\ + \u{5439}\ + \u{6f14}\ + \u{6295}\ + \u{6355}\ + \u{904a}\ + \u{6307}\ + \u{6253}\ + \u{7981}\ + \u{7a7a}\ + \u{5408}\ + \u{6e80}\ + \u{7533}\ + \u{5272}\ + \u{55b6}\ + \u{914d}\ + \u{3014}\ + \u{672c}\ + \u{3015}\ + \u{3014}\ + \u{4e09}\ + \u{3015}\ + \u{3014}\ + \u{4e8c}\ + \u{3015}\ + \u{3014}\ + \u{5b89}\ + \u{3015}\ + \u{3014}\ + \u{70b9}\ + \u{3015}\ + \u{3014}\ + \u{6253}\ + \u{3015}\ + \u{3014}\ + \u{76d7}\ + \u{3015}\ + \u{3014}\ + \u{52dd}\ + \u{3015}\ + \u{3014}\ + \u{6557}\ + \u{3015}\ + \u{5f97}\ + \u{53ef}\ + \u{4e3d}\ + \u{4e38}\ + \u{4e41}\ + \u{20122}\ + \u{4f60}\ + \u{4fbb}\ + \u{5002}\ + \u{507a}\ + \u{5099}\ + \u{50cf}\ + \u{349e}\ + \u{2063a}\ + \u{5154}\ + \u{5164}\ + \u{5177}\ + \u{2051c}\ + \u{34b9}\ + \u{5167}\ + \u{2054b}\ + \u{5197}\ + \u{51a4}\ + \u{4ecc}\ + \u{51ac}\ + \u{291df}\ + \u{5203}\ + \u{34df}\ + \u{523b}\ + \u{5246}\ + \u{5277}\ + \u{3515}\ + \u{5305}\ + \u{5306}\ + \u{5349}\ + \u{535a}\ + \u{5373}\ + \u{537d}\ + \u{537f}\ + \u{20a2c}\ + \u{7070}\ + \u{53ca}\ + \u{53df}\ + \u{20b63}\ + \u{53eb}\ + \u{53f1}\ + \u{5406}\ + \u{549e}\ + \u{5438}\ + \u{5448}\ + \u{5468}\ + \u{54a2}\ + \u{54f6}\ + \u{5510}\ + \u{5553}\ + \u{5563}\ + \u{5584}\ + \u{55ab}\ + \u{55b3}\ + \u{55c2}\ + \u{5716}\ + \u{5717}\ + \u{5651}\ + \u{5674}\ + \u{58ee}\ + \u{57ce}\ + \u{57f4}\ + \u{580d}\ + \u{578b}\ + \u{5832}\ + \u{5831}\ + \u{58ac}\ + \u{214e4}\ + \u{58f2}\ + \u{58f7}\ + \u{5906}\ + \u{5922}\ + \u{5962}\ + \u{216a8}\ + \u{216ea}\ + \u{59ec}\ + \u{5a1b}\ + \u{5a27}\ + \u{59d8}\ + \u{5a66}\ + \u{36ee}\ + \u{5b08}\ + \u{5b3e}\ + \u{219c8}\ + \u{5bc3}\ + \u{5bd8}\ + \u{5bf3}\ + \u{21b18}\ + \u{5bff}\ + \u{5c06}\ + \u{3781}\ + \u{5c60}\ + \u{5cc0}\ + \u{5c8d}\ + \u{21de4}\ + \u{5d43}\ + \u{21de6}\ + \u{5d6e}\ + \u{5d6b}\ + \u{5d7c}\ + \u{5de1}\ + \u{5de2}\ + \u{382f}\ + \u{5dfd}\ + \u{5e28}\ + \u{5e3d}\ + \u{5e69}\ + \u{3862}\ + \u{22183}\ + \u{387c}\ + \u{5eb0}\ + \u{5eb3}\ + \u{5eb6}\ + \u{2a392}\ + \u{22331}\ + \u{8201}\ + \u{5f22}\ + \u{38c7}\ + \u{232b8}\ + \u{261da}\ + \u{5f62}\ + \u{5f6b}\ + \u{38e3}\ + \u{5f9a}\ + \u{5fcd}\ + \u{5fd7}\ + \u{5ff9}\ + \u{6081}\ + \u{393a}\ + \u{391c}\ + \u{226d4}\ + \u{60c7}\ + \u{6148}\ + \u{614c}\ + \u{617a}\ + \u{61b2}\ + \u{61a4}\ + \u{61af}\ + \u{61de}\ + \u{6210}\ + \u{621b}\ + \u{625d}\ + \u{62b1}\ + \u{62d4}\ + \u{6350}\ + \u{22b0c}\ + \u{633d}\ + \u{62fc}\ + \u{6368}\ + \u{6383}\ + \u{63e4}\ + \u{22bf1}\ + \u{6422}\ + \u{63c5}\ + \u{63a9}\ + \u{3a2e}\ + \u{6469}\ + \u{647e}\ + \u{649d}\ + \u{6477}\ + \u{3a6c}\ + \u{656c}\ + \u{2300a}\ + \u{65e3}\ + \u{66f8}\ + \u{6649}\ + \u{3b19}\ + \u{3b08}\ + \u{3ae4}\ + \u{5192}\ + \u{5195}\ + \u{6700}\ + \u{669c}\ + \u{80ad}\ + \u{43d9}\ + \u{6721}\ + \u{675e}\ + \u{6753}\ + \u{233c3}\ + \u{3b49}\ + \u{67fa}\ + \u{6785}\ + \u{6852}\ + \u{2346d}\ + \u{688e}\ + \u{681f}\ + \u{6914}\ + \u{6942}\ + \u{69a3}\ + \u{69ea}\ + \u{6aa8}\ + \u{236a3}\ + \u{6adb}\ + 
\u{3c18}\ + \u{6b21}\ + \u{238a7}\ + \u{6b54}\ + \u{3c4e}\ + \u{6b72}\ + \u{6b9f}\ + \u{6bbb}\ + \u{23a8d}\ + \u{21d0b}\ + \u{23afa}\ + \u{6c4e}\ + \u{23cbc}\ + \u{6cbf}\ + \u{6ccd}\ + \u{6c67}\ + \u{6d16}\ + \u{6d3e}\ + \u{6d69}\ + \u{6d78}\ + \u{6d85}\ + \u{23d1e}\ + \u{6d34}\ + \u{6e2f}\ + \u{6e6e}\ + \u{3d33}\ + \u{6ec7}\ + \u{23ed1}\ + \u{6df9}\ + \u{6f6e}\ + \u{23f5e}\ + \u{23f8e}\ + \u{6fc6}\ + \u{7039}\ + \u{701b}\ + \u{3d96}\ + \u{704a}\ + \u{707d}\ + \u{7077}\ + \u{70ad}\ + \u{20525}\ + \u{7145}\ + \u{24263}\ + \u{719c}\ + \u{7228}\ + \u{7250}\ + \u{24608}\ + \u{7280}\ + \u{7295}\ + \u{24735}\ + \u{24814}\ + \u{737a}\ + \u{738b}\ + \u{3eac}\ + \u{73a5}\ + \u{3eb8}\ + \u{7447}\ + \u{745c}\ + \u{7485}\ + \u{74ca}\ + \u{3f1b}\ + \u{7524}\ + \u{24c36}\ + \u{753e}\ + \u{24c92}\ + \u{2219f}\ + \u{7610}\ + \u{24fa1}\ + \u{24fb8}\ + \u{25044}\ + \u{3ffc}\ + \u{4008}\ + \u{250f3}\ + \u{250f2}\ + \u{25119}\ + \u{25133}\ + \u{771e}\ + \u{771f}\ + \u{778b}\ + \u{4046}\ + \u{4096}\ + \u{2541d}\ + \u{784e}\ + \u{40e3}\ + \u{25626}\ + \u{2569a}\ + \u{256c5}\ + \u{79eb}\ + \u{412f}\ + \u{7a4a}\ + \u{7a4f}\ + \u{2597c}\ + \u{25aa7}\ + \u{4202}\ + \u{25bab}\ + \u{7bc6}\ + \u{7bc9}\ + \u{4227}\ + \u{25c80}\ + \u{7cd2}\ + \u{42a0}\ + \u{7ce8}\ + \u{7ce3}\ + \u{7d00}\ + \u{25f86}\ + \u{7d63}\ + \u{4301}\ + \u{7dc7}\ + \u{7e02}\ + \u{7e45}\ + \u{4334}\ + \u{26228}\ + \u{26247}\ + \u{4359}\ + \u{262d9}\ + \u{7f7a}\ + \u{2633e}\ + \u{7f95}\ + \u{7ffa}\ + \u{264da}\ + \u{26523}\ + \u{8060}\ + \u{265a8}\ + \u{8070}\ + \u{2335f}\ + \u{43d5}\ + \u{80b2}\ + \u{8103}\ + \u{440b}\ + \u{813e}\ + \u{5ab5}\ + \u{267a7}\ + \u{267b5}\ + \u{23393}\ + \u{2339c}\ + \u{8204}\ + \u{8f9e}\ + \u{446b}\ + \u{8291}\ + \u{828b}\ + \u{829d}\ + \u{52b3}\ + \u{82b1}\ + \u{82b3}\ + \u{82bd}\ + \u{82e6}\ + \u{26b3c}\ + \u{831d}\ + \u{8363}\ + \u{83ad}\ + \u{8323}\ + \u{83bd}\ + \u{83e7}\ + \u{8353}\ + \u{83ca}\ + \u{83cc}\ + \u{83dc}\ + \u{26c36}\ + \u{26d6b}\ + \u{26cd5}\ + \u{452b}\ + \u{84f1}\ + \u{84f3}\ + \u{8516}\ + \u{273ca}\ + \u{8564}\ + \u{26f2c}\ + \u{455d}\ + \u{4561}\ + \u{26fb1}\ + \u{270d2}\ + \u{456b}\ + \u{8650}\ + \u{8667}\ + \u{8669}\ + \u{86a9}\ + \u{8688}\ + \u{870e}\ + \u{86e2}\ + \u{8728}\ + \u{876b}\ + \u{8786}\ + \u{87e1}\ + \u{8801}\ + \u{45f9}\ + \u{8860}\ + \u{27667}\ + \u{88d7}\ + \u{88de}\ + \u{4635}\ + \u{88fa}\ + \u{34bb}\ + \u{278ae}\ + \u{27966}\ + \u{46be}\ + \u{46c7}\ + \u{8aa0}\ + \u{27ca8}\ + \u{8cab}\ + \u{8cc1}\ + \u{8d1b}\ + \u{8d77}\ + \u{27f2f}\ + \u{20804}\ + \u{8dcb}\ + \u{8dbc}\ + \u{8df0}\ + \u{208de}\ + \u{8ed4}\ + \u{285d2}\ + \u{285ed}\ + \u{9094}\ + \u{90f1}\ + \u{9111}\ + \u{2872e}\ + \u{911b}\ + \u{9238}\ + \u{92d7}\ + \u{92d8}\ + \u{927c}\ + \u{93f9}\ + \u{9415}\ + \u{28bfa}\ + \u{958b}\ + \u{4995}\ + \u{95b7}\ + \u{28d77}\ + \u{49e6}\ + \u{96c3}\ + \u{5db2}\ + \u{9723}\ + \u{29145}\ + \u{2921a}\ + \u{4a6e}\ + \u{4a76}\ + \u{97e0}\ + \u{2940a}\ + \u{4ab2}\ + \u{29496}\ + \u{9829}\ + \u{295b6}\ + \u{98e2}\ + \u{4b33}\ + \u{9929}\ + \u{99a7}\ + \u{99c2}\ + \u{99fe}\ + \u{4bce}\ + \u{29b30}\ + \u{9c40}\ + \u{9cfd}\ + \u{4cce}\ + \u{4ced}\ + \u{9d67}\ + \u{2a0ce}\ + \u{4cf8}\ + \u{2a105}\ + \u{2a20e}\ + \u{2a291}\ + \u{4d56}\ + \u{9efe}\ + \u{9f05}\ + \u{9f0f}\ + \u{9f16}\ + \u{2a600}"; diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/IdnaTest.txt b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/IdnaTest.txt new file mode 100644 index 000000000..f0b240a65 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/IdnaTest.txt @@ -0,0 
+1,7848 @@ +# IdnaTest.txt +# Date: 2017-06-02, 14:19:52 GMT +# © 2017 Unicode®, Inc. +# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. +# For terms of use, see http://www.unicode.org/terms_of_use.html +# +# Contains test cases for verifying UTS46 conformance. For more information, +# see http://www.unicode.org/reports/tr46/ +# +# FORMAT: +# +# This file is in UTF8, with certain characters escaped using the \uXXXX or \x{XXXX} +# convention where they could otherwise have a confusing display. +# These characters include: +# +# - General Categories C, Z, and M +# - Default ignorable characters +# - Bidi categories R, AL, AN +# +# Columns (c1, c2,...) are separated by semicolons. +# Leading and trailing spaces and tabs in each column are ignored. +# Comments are indicated with hash marks. +# +# Column 1: type - T for transitional, N for nontransitional, B for both +# Column 2: source - The source string to be tested +# Column 3: toUnicode - The result of applying toUnicode to the source, using nontransitional. +# A blank value means the same as the source value; a value in [...] is a set of error codes. +# Column 4: toASCII - The result of applying toASCII to the source, using the specified type: T, N, or B. +# A blank value means the same as the toUnicode value; a value in [...] is a set of error codes. +# Column 5: idna2008 - NV8 is only present if the status is valid but the character is excluded by IDNA2008 +# from all domain names for all versions of Unicode. +# XV8 is present when the character is excluded by IDNA2008 for the current version of Unicode. +# These are informative values only. +# +# If the value of toUnicode is the same as source, the column will be blank. +# The line comments currently show visible characters that have been escaped +# (after removing default-ignorables and controls, except for whitespace) +# +# The test is performed with the following flag settings: +# +# VerifyDnsLength: true +# CheckHyphens: true +# CheckBidi: true +# CheckJoiners: true +# UseSTD3ASCIIRules: true +# +# An error in toUnicode or toASCII is indicated by a value in square brackets, such as "[B5 B6]". +# In such a case, the contents is a list of error codes based on the step numbers in UTS46 and IDNA2008, +# with the following formats: +# +# Pn for Section 4 Processing step n +# Vn for 4.1 Validity Criteria step n +# An for 4.2 ToASCII step n +# Bn for Bidi (in IDNA2008) +# Cn for ContextJ (in IDNA2008) +# +# However, these particular error codes are only informative; +# the important feature is whether or not there is an error. +# +# CONFORMANCE: +# +# To test for conformance to UTS46, an implementation must first perform the toUnicode operation +# on the source string, then the toASCII operation (with the indicated type) on the source string. +# Implementations may be more strict than UTS46; thus they may have errors where the file indicates results. +# In particular, an implementation conformant to IDNA2008 would disallow the input for lines marked with NV8. +# +# Moreover, the error codes in the file are informative; implementations need only record that there is an error: +# they need not reproduce those codes. Thus to then verify conformance for the toASCII and toUnicode columns: +# +# - If the file indicates an error, the implementation must also have an error. +# - If the file does not indicate an error, then the implementation must either have an error, +# or must have a matching result. 
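[Reviewer note, not part of the vendored file: the FORMAT comments above fully specify how each test line is laid out, so the following minimal Rust sketch of a line parser may help when reading the vectors below. Every name here (`TestCase`, `parse_line`) is a hypothetical illustration, not code from the vendored `idna` crate, and it leaves the `\uXXXX` escapes in the columns unexpanded.]

// A minimal sketch of parsing one IdnaTest.txt line, following the
// FORMAT section above. Error-code results stay as bracketed strings
// (e.g. "[B5 B6]"); a real harness would detect them via `starts_with('[')`.
#[derive(Debug)]
struct TestCase {
    kind: char,          // column 1: 'T' (transitional), 'N', or 'B' (both)
    source: String,      // column 2: the source string to be tested
    to_unicode: String,  // column 3: blank means "same as source"
    to_ascii: String,    // column 4: blank means "same as toUnicode"
}

fn parse_line(line: &str) -> Option<TestCase> {
    // Comments are indicated with hash marks; strip them, then skip
    // lines that are blank or comment-only.
    let data = line.split('#').next().unwrap_or("").trim();
    if data.is_empty() {
        return None;
    }
    // Columns are separated by semicolons; leading and trailing
    // whitespace in each column is ignored.
    let cols: Vec<&str> = data.split(';').map(str::trim).collect();
    if cols.len() < 4 {
        return None;
    }
    let source = cols[1].to_string();
    // A blank toUnicode value means the same as the source value;
    // a blank toASCII value means the same as the toUnicode value.
    let to_unicode = if cols[2].is_empty() { source.clone() } else { cols[2].to_string() };
    let to_ascii = if cols[3].is_empty() { to_unicode.clone() } else { cols[3].to_string() };
    Some(TestCase {
        kind: cols[0].chars().next()?,
        source,
        to_unicode,
        to_ascii,
    })
}

fn main() {
    // Second test vector below: `T; faß.de; ; fass.de`
    let case = parse_line("T; faß.de; ; fass.de").unwrap();
    assert_eq!(case.to_unicode, "faß.de"); // blank column 3 -> same as source
    assert_eq!(case.to_ascii, "fass.de");
    println!("{:?}", case);
}

[Per the CONFORMANCE comments, a harness built on this would run toUnicode and then toASCII on `source` and, wherever the expected column is a bracketed error set, merely check that its own operation reported *some* error rather than matching the codes.]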
+# +# ==================================================================================================== +B; fass.de; ; +T; faß.de; ; fass.de +N; faß.de; ; xn--fa-hia.de +T; Faß.de; faß.de; fass.de +N; Faß.de; faß.de; xn--fa-hia.de +B; xn--fa-hia.de; faß.de; xn--fa-hia.de + +# BIDI TESTS + +B; à\u05D0; [B5 B6]; [B5 B6] # àא +B; a\u0300\u05D0; [B5 B6]; [B5 B6] # àא +B; A\u0300\u05D0; [B5 B6]; [B5 B6] # àא +B; À\u05D0; [B5 B6]; [B5 B6] # àא +B; xn--0ca24w; [B5 B6]; [B5 B6] # àא +B; 0à.\u05D0; [B1]; [B1] # 0à.א +B; 0a\u0300.\u05D0; [B1]; [B1] # 0à.א +B; 0A\u0300.\u05D0; [B1]; [B1] # 0à.א +B; 0À.\u05D0; [B1]; [B1] # 0à.א +B; xn--0-sfa.xn--4db; [B1]; [B1] # 0à.א +B; à.\u05D0\u0308; ; xn--0ca.xn--ssa73l # à.א̈ +B; a\u0300.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ +B; A\u0300.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ +B; À.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ +B; xn--0ca.xn--ssa73l; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ +B; à.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א +B; a\u0300.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א +B; A\u0300.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א +B; À.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א +B; xn--0ca.xn--0-zhcb98c; [B4]; [B4] # à.א0٠א +B; \u0308.\u05D0; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̈.א +B; xn--ssa.xn--4db; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̈.א +B; à.\u05D00\u0660; [B4]; [B4] # à.א0٠ +B; a\u0300.\u05D00\u0660; [B4]; [B4] # à.א0٠ +B; A\u0300.\u05D00\u0660; [B4]; [B4] # à.א0٠ +B; À.\u05D00\u0660; [B4]; [B4] # à.א0٠ +B; xn--0ca.xn--0-zhc74b; [B4]; [B4] # à.א0٠ +B; àˇ.\u05D0; [B6]; [B6] # àˇ.א +B; a\u0300ˇ.\u05D0; [B6]; [B6] # àˇ.א +B; A\u0300ˇ.\u05D0; [B6]; [B6] # àˇ.א +B; Àˇ.\u05D0; [B6]; [B6] # àˇ.א +B; xn--0ca88g.xn--4db; [B6]; [B6] # àˇ.א +B; à\u0308.\u05D0; ; xn--0ca81i.xn--4db # à̈.א +B; a\u0300\u0308.\u05D0; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א +B; A\u0300\u0308.\u05D0; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א +B; À\u0308.\u05D0; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א +B; xn--0ca81i.xn--4db; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א + +# CONTEXT TESTS + +T; a\u200Cb; [C1]; ab # ab +N; a\u200Cb; [C1]; [C1] # ab +T; A\u200CB; [C1]; ab # ab +N; A\u200CB; [C1]; [C1] # ab +T; A\u200Cb; [C1]; ab # ab +N; A\u200Cb; [C1]; [C1] # ab +B; ab; ; +B; xn--ab-j1t; [C1]; [C1] # ab +T; a\u094D\u200Cb; ; xn--ab-fsf # a्b +N; a\u094D\u200Cb; ; xn--ab-fsf604u # a्b +T; A\u094D\u200CB; a\u094D\u200Cb; xn--ab-fsf # a्b +N; A\u094D\u200CB; a\u094D\u200Cb; xn--ab-fsf604u # a्b +T; A\u094D\u200Cb; a\u094D\u200Cb; xn--ab-fsf # a्b +N; A\u094D\u200Cb; a\u094D\u200Cb; xn--ab-fsf604u # a्b +B; xn--ab-fsf; a\u094Db; xn--ab-fsf # a्b +B; a\u094Db; ; xn--ab-fsf # a्b +B; A\u094DB; a\u094Db; xn--ab-fsf # a्b +B; A\u094Db; a\u094Db; xn--ab-fsf # a्b +B; xn--ab-fsf604u; a\u094D\u200Cb; xn--ab-fsf604u # a्b +T; \u0308\u200C\u0308\u0628b; [B1 C1 V5]; [B1 V5] # ̈̈بb +N; \u0308\u200C\u0308\u0628b; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb +T; \u0308\u200C\u0308\u0628B; [B1 C1 V5]; [B1 V5] # ̈̈بb +N; \u0308\u200C\u0308\u0628B; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb +B; xn--b-bcba413a; [B1 V5]; [B1 V5] # ̈̈بb +B; xn--b-bcba413a2w8b; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb +T; a\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6] # aب̈̈ +N; a\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ +T; A\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6] # aب̈̈ +N; A\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ +B; xn--a-ccba213a; [B5 B6]; [B5 B6] # aب̈̈ +B; xn--a-ccba213a5w8b; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ +T; a\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb +N; 
a\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb +T; A\u0628\u0308\u200C\u0308\u0628B; [B5]; [B5] # aب̈̈بb +N; A\u0628\u0308\u200C\u0308\u0628B; [B5]; [B5] # aب̈̈بb +T; A\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb +N; A\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb +B; xn--ab-uuba211bca; [B5]; [B5] # aب̈̈بb +B; xn--ab-uuba211bca8057b; [B5]; [B5] # aب̈̈بb +T; a\u200Db; [C2]; ab # ab +N; a\u200Db; [C2]; [C2] # ab +T; A\u200DB; [C2]; ab # ab +N; A\u200DB; [C2]; [C2] # ab +T; A\u200Db; [C2]; ab # ab +N; A\u200Db; [C2]; [C2] # ab +B; xn--ab-m1t; [C2]; [C2] # ab +T; a\u094D\u200Db; ; xn--ab-fsf # a्b +N; a\u094D\u200Db; ; xn--ab-fsf014u # a्b +T; A\u094D\u200DB; a\u094D\u200Db; xn--ab-fsf # a्b +N; A\u094D\u200DB; a\u094D\u200Db; xn--ab-fsf014u # a्b +T; A\u094D\u200Db; a\u094D\u200Db; xn--ab-fsf # a्b +N; A\u094D\u200Db; a\u094D\u200Db; xn--ab-fsf014u # a्b +B; xn--ab-fsf014u; a\u094D\u200Db; xn--ab-fsf014u # a्b +T; \u0308\u200D\u0308\u0628b; [B1 C2 V5]; [B1 V5] # ̈̈بb +N; \u0308\u200D\u0308\u0628b; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb +T; \u0308\u200D\u0308\u0628B; [B1 C2 V5]; [B1 V5] # ̈̈بb +N; \u0308\u200D\u0308\u0628B; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb +B; xn--b-bcba413a7w8b; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb +T; a\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6] # aب̈̈ +N; a\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ +T; A\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6] # aب̈̈ +N; A\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ +B; xn--a-ccba213abx8b; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ +T; a\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5] # aب̈̈بb +N; a\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5 C2] # aب̈̈بb +T; A\u0628\u0308\u200D\u0308\u0628B; [B5 C2]; [B5] # aب̈̈بb +N; A\u0628\u0308\u200D\u0308\u0628B; [B5 C2]; [B5 C2] # aب̈̈بb +T; A\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5] # aب̈̈بb +N; A\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5 C2] # aب̈̈بb +B; xn--ab-uuba211bca5157b; [B5 C2]; [B5 C2] # aب̈̈بb + +# SELECTED TESTS + +B; ¡; ; xn--7a; NV8 +B; xn--7a; ¡; xn--7a; NV8 +B; ᧚; ; xn--pkf; XV8 +B; xn--pkf; ᧚; xn--pkf; XV8 +B; 。; [A4_2]; [A4_2] +B; .; [A4_2]; [A4_2] +B; ꭠ; ; xn--3y9a +B; xn--3y9a; ꭠ; xn--3y9a +B; 1234567890ä1234567890123456789012345678901234567890123456; ; [A4_2] +B; 1234567890a\u03081234567890123456789012345678901234567890123456; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] +B; 1234567890A\u03081234567890123456789012345678901234567890123456; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] +B; 1234567890Ä1234567890123456789012345678901234567890123456; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] +B; xn--12345678901234567890123456789012345678901234567890123456-fxe; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] +B; www.eXample.cOm; www.example.com; +B; Bücher.de; bücher.de; xn--bcher-kva.de +B; Bu\u0308cher.de; bücher.de; xn--bcher-kva.de +B; bu\u0308cher.de; bücher.de; xn--bcher-kva.de +B; bücher.de; ; xn--bcher-kva.de +B; BÜCHER.DE; bücher.de; xn--bcher-kva.de +B; BU\u0308CHER.DE; bücher.de; xn--bcher-kva.de +B; xn--bcher-kva.de; bücher.de; xn--bcher-kva.de +B; ÖBB; öbb; xn--bb-eka +B; O\u0308BB; öbb; xn--bb-eka +B; o\u0308bb; öbb; xn--bb-eka +B; öbb; ; xn--bb-eka +B; Öbb; öbb; xn--bb-eka +B; O\u0308bb; öbb; xn--bb-eka +B; xn--bb-eka; öbb; xn--bb-eka +T; βόλος.com; ; xn--nxasmq6b.com +N; βόλος.com; ; xn--nxasmm1c.com +T; βο\u0301λος.com; βόλος.com; xn--nxasmq6b.com +N; βο\u0301λος.com; βόλος.com; xn--nxasmm1c.com +B; ΒΟ\u0301ΛΟΣ.COM; βόλοσ.com; xn--nxasmq6b.com +B; ΒΌΛΟΣ.COM; 
βόλοσ.com; xn--nxasmq6b.com +B; βόλοσ.com; ; xn--nxasmq6b.com +B; βο\u0301λοσ.com; βόλοσ.com; xn--nxasmq6b.com +B; Βο\u0301λοσ.com; βόλοσ.com; xn--nxasmq6b.com +B; Βόλοσ.com; βόλοσ.com; xn--nxasmq6b.com +B; xn--nxasmq6b.com; βόλοσ.com; xn--nxasmq6b.com +T; Βο\u0301λος.com; βόλος.com; xn--nxasmq6b.com +N; Βο\u0301λος.com; βόλος.com; xn--nxasmm1c.com +T; Βόλος.com; βόλος.com; xn--nxasmq6b.com +N; Βόλος.com; βόλος.com; xn--nxasmm1c.com +B; xn--nxasmm1c.com; βόλος.com; xn--nxasmm1c.com +B; xn--nxasmm1c; βόλος; xn--nxasmm1c +T; βόλος; ; xn--nxasmq6b +N; βόλος; ; xn--nxasmm1c +T; βο\u0301λος; βόλος; xn--nxasmq6b +N; βο\u0301λος; βόλος; xn--nxasmm1c +B; ΒΟ\u0301ΛΟΣ; βόλοσ; xn--nxasmq6b +B; ΒΌΛΟΣ; βόλοσ; xn--nxasmq6b +B; βόλοσ; ; xn--nxasmq6b +B; βο\u0301λοσ; βόλοσ; xn--nxasmq6b +B; Βο\u0301λοσ; βόλοσ; xn--nxasmq6b +B; Βόλοσ; βόλοσ; xn--nxasmq6b +B; xn--nxasmq6b; βόλοσ; xn--nxasmq6b +T; Βόλος; βόλος; xn--nxasmq6b +N; Βόλος; βόλος; xn--nxasmm1c +T; Βο\u0301λος; βόλος; xn--nxasmq6b +N; Βο\u0301λος; βόλος; xn--nxasmm1c +T; www.ශ\u0DCA\u200Dර\u0DD3.com; ; www.xn--10cl1a0b.com # www.ශ්රී.com +N; www.ශ\u0DCA\u200Dර\u0DD3.com; ; www.xn--10cl1a0b660p.com # www.ශ්රී.com +T; WWW.ශ\u0DCA\u200Dර\u0DD3.COM; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +N; WWW.ශ\u0DCA\u200Dර\u0DD3.COM; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com +T; Www.ශ\u0DCA\u200Dර\u0DD3.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +N; Www.ශ\u0DCA\u200Dර\u0DD3.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com +B; www.xn--10cl1a0b.com; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +B; www.ශ\u0DCAර\u0DD3.com; ; www.xn--10cl1a0b.com # www.ශ්රී.com +B; WWW.ශ\u0DCAර\u0DD3.COM; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +B; Www.ශ\u0DCAර\u0DD3.com; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +B; www.xn--10cl1a0b660p.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com +T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; ; xn--mgba3gch31f # نامهای +N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; ; xn--mgba3gch31f060k # نامهای +B; xn--mgba3gch31f; \u0646\u0627\u0645\u0647\u0627\u06CC; xn--mgba3gch31f # نامهای +B; \u0646\u0627\u0645\u0647\u0627\u06CC; ; xn--mgba3gch31f # نامهای +B; xn--mgba3gch31f060k; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; xn--mgba3gch31f060k # نامهای +B; xn--mgba3gch31f060k.com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com +T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; ; xn--mgba3gch31f.com # نامهای.com +N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; ; xn--mgba3gch31f060k.com # نامهای.com +T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com +T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com +B; xn--mgba3gch31f.com; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +B; \u0646\u0627\u0645\u0647\u0627\u06CC.com; ; xn--mgba3gch31f.com # نامهای.com +B; \u0646\u0627\u0645\u0647\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +B; 
\u0646\u0627\u0645\u0647\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +B; a.b.c。d。; a.b.c.d.; +B; a.b.c。d。; a.b.c.d.; +B; A.B.C。D。; a.b.c.d.; +B; A.b.c。D。; a.b.c.d.; +B; a.b.c.d.; ; +B; A.B.C。D。; a.b.c.d.; +B; A.b.c。D。; a.b.c.d.; +B; U\u0308.xn--tda; ü.ü; xn--tda.xn--tda +B; Ü.xn--tda; ü.ü; xn--tda.xn--tda +B; ü.xn--tda; ü.ü; xn--tda.xn--tda +B; u\u0308.xn--tda; ü.ü; xn--tda.xn--tda +B; U\u0308.XN--TDA; ü.ü; xn--tda.xn--tda +B; Ü.XN--TDA; ü.ü; xn--tda.xn--tda +B; Ü.xn--Tda; ü.ü; xn--tda.xn--tda +B; U\u0308.xn--Tda; ü.ü; xn--tda.xn--tda +B; xn--tda.xn--tda; ü.ü; xn--tda.xn--tda +B; ü.ü; ; xn--tda.xn--tda +B; u\u0308.u\u0308; ü.ü; xn--tda.xn--tda +B; U\u0308.U\u0308; ü.ü; xn--tda.xn--tda +B; Ü.Ü; ü.ü; xn--tda.xn--tda +B; Ü.ü; ü.ü; xn--tda.xn--tda +B; U\u0308.u\u0308; ü.ü; xn--tda.xn--tda +B; xn--u-ccb; [V1]; [V1] # ü +B; a⒈com; [P1 V6]; [P1 V6] +B; a1.com; ; +B; A⒈COM; [P1 V6]; [P1 V6] +B; A⒈Com; [P1 V6]; [P1 V6] +B; xn--acom-0w1b; [V6]; [V6] +B; xn--a-ecp.ru; [V6]; [V6] +B; xn--0.pt; [A3]; [A3] +B; xn--a.pt; [V6]; [V6] # .pt +B; xn--a-Ä.pt; [A3]; [A3] +B; xn--a-A\u0308.pt; [A3]; [A3] +B; xn--a-a\u0308.pt; [A3]; [A3] +B; xn--a-ä.pt; [A3]; [A3] +B; XN--A-Ä.PT; [A3]; [A3] +B; XN--A-A\u0308.PT; [A3]; [A3] +B; Xn--A-A\u0308.pt; [A3]; [A3] +B; Xn--A-Ä.pt; [A3]; [A3] +B; xn--xn--a--gua.pt; [V2]; [V2] +B; 日本語。JP; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。JP; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。jp; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。Jp; 日本語.jp; xn--wgv71a119e.jp +B; xn--wgv71a119e.jp; 日本語.jp; xn--wgv71a119e.jp +B; 日本語.jp; ; xn--wgv71a119e.jp +B; 日本語.JP; 日本語.jp; xn--wgv71a119e.jp +B; 日本語.Jp; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。jp; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。Jp; 日本語.jp; xn--wgv71a119e.jp +B; ☕; ; xn--53h; NV8 +B; xn--53h; ☕; xn--53h; NV8 +T; 1.aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +N; 1.aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +T; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [C1 C2 A4_2] # 
1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +B; 1.xn--assbcssssssssdssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssssz-pxq1419aa; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; ; [A4_2] +B; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.ASSBCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.ASSBCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.Assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.Assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.xn--assbcssssssssdssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssssz-pxq1419aa69989dba9gc; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.Aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +N; 1.Aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +B; 
1.xn--abcdexyz-qyacaaabaaaaaaabaaaaaaaaabaaaaaaaaabaaaaaaaa010ze2isb1140zba8cc; [C1 C2]; [C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +T; \u200Cx\u200Dn\u200C-\u200D-bß; [C1 C2]; xn--bss # xn--bß +N; \u200Cx\u200Dn\u200C-\u200D-bß; [C1 C2]; [C1 C2] # xn--bß +T; \u200CX\u200DN\u200C-\u200D-BSS; [C1 C2]; xn--bss # xn--bss +N; \u200CX\u200DN\u200C-\u200D-BSS; [C1 C2]; [C1 C2] # xn--bss +T; \u200Cx\u200Dn\u200C-\u200D-bss; [C1 C2]; xn--bss # xn--bss +N; \u200Cx\u200Dn\u200C-\u200D-bss; [C1 C2]; [C1 C2] # xn--bss +T; \u200CX\u200Dn\u200C-\u200D-Bss; [C1 C2]; xn--bss # xn--bss +N; \u200CX\u200Dn\u200C-\u200D-Bss; [C1 C2]; [C1 C2] # xn--bss +B; xn--bss; 夙; xn--bss +B; 夙; ; xn--bss +B; xn--xn--bss-7z6ccid; [C1 C2]; [C1 C2] # xn--bss +T; \u200CX\u200Dn\u200C-\u200D-Bß; [C1 C2]; xn--bss # xn--bß +N; \u200CX\u200Dn\u200C-\u200D-Bß; [C1 C2]; [C1 C2] # xn--bß +B; xn--xn--b-pqa5796ccahd; [C1 C2]; [C1 C2] # xn--bß +B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00ſ\u2064𝔰󠇯ffl; 夡夞夜夙; xn--bssffl +B; x\u034FN\u200B-\u00AD-\u180CB\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl +B; x\u034Fn\u200B-\u00AD-\u180Cb\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl +B; X\u034FN\u200B-\u00AD-\u180CB\uFE00S\u2064S󠇯FFL; 夡夞夜夙; xn--bssffl +B; X\u034Fn\u200B-\u00AD-\u180CB\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl +B; xn--bssffl; 夡夞夜夙; xn--bssffl +B; 夡夞夜夙; ; xn--bssffl +B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00S\u2064𝔰󠇯FFL; 夡夞夜夙; xn--bssffl +B; x\u034FN\u200B-\u00AD-\u180CB\uFE00S\u2064s󠇯FFL; 夡夞夜夙; xn--bssffl +B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00s\u2064𝔰󠇯ffl; 夡夞夜夙; xn--bssffl +B; 123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; +B; 123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; ; +B; 123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; ; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; ; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; ; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; [A4_1 A4_2] +B; ä1234567890123456789012345678901234567890123456789012345; ; xn--1234567890123456789012345678901234567890123456789012345-9te +B; a\u03081234567890123456789012345678901234567890123456789012345; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te +B; A\u03081234567890123456789012345678901234567890123456789012345; 
ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te +B; Ä1234567890123456789012345678901234567890123456789012345; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te +B; xn--1234567890123456789012345678901234567890123456789012345-9te; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 
123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; ; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. 
+B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; ; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; ; [A4_2] +B; 
123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; ; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] +B; 
123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; [A4_1 A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] +B; a.b..-q--a-.e; [V2 V3 A4_2]; [V2 V3 A4_2] +B; a.b..-q--ä-.e; [V2 V3 A4_2]; [V2 V3 A4_2] +B; a.b..-q--a\u0308-.e; [V2 V3 A4_2]; [V2 V3 A4_2] +B; A.B..-Q--A\u0308-.E; [V2 V3 A4_2]; [V2 V3 
A4_2] +B; A.B..-Q--Ä-.E; [V2 V3 A4_2]; [V2 V3 A4_2] +B; A.b..-Q--Ä-.E; [V2 V3 A4_2]; [V2 V3 A4_2] +B; A.b..-Q--A\u0308-.E; [V2 V3 A4_2]; [V2 V3 A4_2] +B; a.b..xn---q----jra.e; [V2 V3 A4_2]; [V2 V3 A4_2] +B; a..c; [A4_2]; [A4_2] +B; a.-b.; [V3]; [V3] +B; a.b-.c; [V3]; [V3] +B; a.-.c; [V3]; [V3] +B; a.bc--de.f; [V2]; [V2] +B; ä.\u00AD.c; [A4_2]; [A4_2] +B; a\u0308.\u00AD.c; [A4_2]; [A4_2] +B; A\u0308.\u00AD.C; [A4_2]; [A4_2] +B; Ä.\u00AD.C; [A4_2]; [A4_2] +B; xn--4ca..c; [A4_2]; [A4_2] +B; ä.-b.; [V3]; [V3] +B; a\u0308.-b.; [V3]; [V3] +B; A\u0308.-B.; [V3]; [V3] +B; Ä.-B.; [V3]; [V3] +B; xn--4ca.-b.; [V3]; [V3] +B; ä.b-.c; [V3]; [V3] +B; a\u0308.b-.c; [V3]; [V3] +B; A\u0308.B-.C; [V3]; [V3] +B; Ä.B-.C; [V3]; [V3] +B; Ä.b-.C; [V3]; [V3] +B; A\u0308.b-.C; [V3]; [V3] +B; xn--4ca.b-.c; [V3]; [V3] +B; ä.-.c; [V3]; [V3] +B; a\u0308.-.c; [V3]; [V3] +B; A\u0308.-.C; [V3]; [V3] +B; Ä.-.C; [V3]; [V3] +B; xn--4ca.-.c; [V3]; [V3] +B; ä.bc--de.f; [V2]; [V2] +B; a\u0308.bc--de.f; [V2]; [V2] +B; A\u0308.BC--DE.F; [V2]; [V2] +B; Ä.BC--DE.F; [V2]; [V2] +B; Ä.bc--De.f; [V2]; [V2] +B; A\u0308.bc--De.f; [V2]; [V2] +B; xn--4ca.bc--de.f; [V2]; [V2] +B; a.b.\u0308c.d; [V5]; [V5] # a.b.̈c.d +B; A.B.\u0308C.D; [V5]; [V5] # a.b.̈c.d +B; A.b.\u0308c.d; [V5]; [V5] # a.b.̈c.d +B; a.b.xn--c-bcb.d; [V5]; [V5] # a.b.̈c.d +B; A0; a0; +B; 0A; 0a; +B; 0A.\u05D0; [B1]; [B1] # 0a.א +B; 0a.\u05D0; [B1]; [B1] # 0a.א +B; 0a.xn--4db; [B1]; [B1] # 0a.א +B; c.xn--0-eha.xn--4db; [B1]; [B1] # c.0ü.א +B; b-.\u05D0; [B6 V3]; [B6 V3] # b-.א +B; B-.\u05D0; [B6 V3]; [B6 V3] # b-.א +B; b-.xn--4db; [B6 V3]; [B6 V3] # b-.א +B; d.xn----dha.xn--4db; [B6 V3]; [B6 V3] # d.ü-.א +B; a\u05D0; [B5 B6]; [B5 B6] # aא +B; A\u05D0; [B5 B6]; [B5 B6] # aא +B; xn--a-0hc; [B5 B6]; [B5 B6] # aא +B; \u05D0\u05C7; ; xn--vdbr # אׇ +B; xn--vdbr; \u05D0\u05C7; xn--vdbr # אׇ +B; \u05D09\u05C7; ; xn--9-ihcz # א9ׇ +B; xn--9-ihcz; \u05D09\u05C7; xn--9-ihcz # א9ׇ +B; \u05D0a\u05C7; [B2 B3]; [B2 B3] # אaׇ +B; \u05D0A\u05C7; [B2 B3]; [B2 B3] # אaׇ +B; xn--a-ihcz; [B2 B3]; [B2 B3] # אaׇ +B; \u05D0\u05EA; ; xn--4db6c # את +B; xn--4db6c; \u05D0\u05EA; xn--4db6c # את +B; \u05D0\u05F3\u05EA; ; xn--4db6c0a # א׳ת +B; xn--4db6c0a; \u05D0\u05F3\u05EA; xn--4db6c0a # א׳ת +B; a\u05D0Tz; [B5]; [B5] # aאtz +B; a\u05D0tz; [B5]; [B5] # aאtz +B; A\u05D0TZ; [B5]; [B5] # aאtz +B; A\u05D0tz; [B5]; [B5] # aאtz +B; xn--atz-qpe; [B5]; [B5] # aאtz +B; \u05D0T\u05EA; [B2]; [B2] # אtת +B; \u05D0t\u05EA; [B2]; [B2] # אtת +B; xn--t-zhc3f; [B2]; [B2] # אtת +B; \u05D07\u05EA; ; xn--7-zhc3f # א7ת +B; xn--7-zhc3f; \u05D07\u05EA; xn--7-zhc3f # א7ת +B; \u05D0\u0667\u05EA; ; xn--4db6c6t # א٧ת +B; xn--4db6c6t; \u05D0\u0667\u05EA; xn--4db6c6t # א٧ת +B; a7\u0667z; [B5]; [B5] # a7٧z +B; A7\u0667Z; [B5]; [B5] # a7٧z +B; A7\u0667z; [B5]; [B5] # a7٧z +B; xn--a7z-06e; [B5]; [B5] # a7٧z +B; \u05D07\u0667\u05EA; [B4]; [B4] # א7٧ת +B; xn--7-zhc3fty; [B4]; [B4] # א7٧ת +T; ஹ\u0BCD\u200D; ; xn--dmc4b # ஹ் +N; ஹ\u0BCD\u200D; ; xn--dmc4b194h # ஹ் +B; xn--dmc4b; ஹ\u0BCD; xn--dmc4b # ஹ் +B; ஹ\u0BCD; ; xn--dmc4b # ஹ் +B; xn--dmc4b194h; ஹ\u0BCD\u200D; xn--dmc4b194h # ஹ் +T; ஹ\u200D; [C2]; xn--dmc # ஹ +N; ஹ\u200D; [C2]; [C2] # ஹ +B; xn--dmc; ஹ; xn--dmc +B; ஹ; ; xn--dmc +B; xn--dmc225h; [C2]; [C2] # ஹ +T; \u200D; [C2]; [A4_2] # +N; \u200D; [C2]; [C2] # +B; ; [A4_2]; [A4_2] +B; xn--1ug; [C2]; [C2] # +T; ஹ\u0BCD\u200C; ; xn--dmc4b # ஹ் +N; ஹ\u0BCD\u200C; ; xn--dmc4by94h # ஹ் +B; xn--dmc4by94h; ஹ\u0BCD\u200C; xn--dmc4by94h # ஹ் +T; ஹ\u200C; [C1]; xn--dmc # ஹ +N; ஹ\u200C; [C1]; [C1] # ஹ +B; xn--dmc025h; [C1]; [C1] # ஹ +T; \u200C; 
[C1]; [A4_2] # +N; \u200C; [C1]; [C1] # +B; xn--0ug; [C1]; [C1] # +T; \u0644\u0670\u200C\u06ED\u06EF; ; xn--ghb2gxqia # لٰۭۯ +N; \u0644\u0670\u200C\u06ED\u06EF; ; xn--ghb2gxqia7523a # لٰۭۯ +B; xn--ghb2gxqia; \u0644\u0670\u06ED\u06EF; xn--ghb2gxqia # لٰۭۯ +B; \u0644\u0670\u06ED\u06EF; ; xn--ghb2gxqia # لٰۭۯ +B; xn--ghb2gxqia7523a; \u0644\u0670\u200C\u06ED\u06EF; xn--ghb2gxqia7523a # لٰۭۯ +T; \u0644\u0670\u200C\u06EF; ; xn--ghb2g3q # لٰۯ +N; \u0644\u0670\u200C\u06EF; ; xn--ghb2g3qq34f # لٰۯ +B; xn--ghb2g3q; \u0644\u0670\u06EF; xn--ghb2g3q # لٰۯ +B; \u0644\u0670\u06EF; ; xn--ghb2g3q # لٰۯ +B; xn--ghb2g3qq34f; \u0644\u0670\u200C\u06EF; xn--ghb2g3qq34f # لٰۯ +T; \u0644\u200C\u06ED\u06EF; ; xn--ghb25aga # لۭۯ +N; \u0644\u200C\u06ED\u06EF; ; xn--ghb25aga828w # لۭۯ +B; xn--ghb25aga; \u0644\u06ED\u06EF; xn--ghb25aga # لۭۯ +B; \u0644\u06ED\u06EF; ; xn--ghb25aga # لۭۯ +B; xn--ghb25aga828w; \u0644\u200C\u06ED\u06EF; xn--ghb25aga828w # لۭۯ +T; \u0644\u200C\u06EF; ; xn--ghb65a # لۯ +N; \u0644\u200C\u06EF; ; xn--ghb65a953d # لۯ +B; xn--ghb65a; \u0644\u06EF; xn--ghb65a # لۯ +B; \u0644\u06EF; ; xn--ghb65a # لۯ +B; xn--ghb65a953d; \u0644\u200C\u06EF; xn--ghb65a953d # لۯ +T; \u0644\u0670\u200C\u06ED; [B3 C1]; xn--ghb2gxq # لٰۭ +N; \u0644\u0670\u200C\u06ED; [B3 C1]; [B3 C1] # لٰۭ +B; xn--ghb2gxq; \u0644\u0670\u06ED; xn--ghb2gxq # لٰۭ +B; \u0644\u0670\u06ED; ; xn--ghb2gxq # لٰۭ +B; xn--ghb2gxqy34f; [B3 C1]; [B3 C1] # لٰۭ +T; \u06EF\u200C\u06EF; [C1]; xn--cmba # ۯۯ +N; \u06EF\u200C\u06EF; [C1]; [C1] # ۯۯ +B; xn--cmba; \u06EF\u06EF; xn--cmba # ۯۯ +B; \u06EF\u06EF; ; xn--cmba # ۯۯ +B; xn--cmba004q; [C1]; [C1] # ۯۯ +T; \u0644\u200C; [B3 C1]; xn--ghb # ل +N; \u0644\u200C; [B3 C1]; [B3 C1] # ل +B; xn--ghb; \u0644; xn--ghb # ل +B; \u0644; ; xn--ghb # ل +B; xn--ghb413k; [B3 C1]; [B3 C1] # ل +B; a。。b; [A4_2]; [A4_2] +B; A。。B; [A4_2]; [A4_2] +B; a..b; [A4_2]; [A4_2] +T; \u200D。。\u06B9\u200C; [B1 B3 C1 C2 A4_2]; [A4_2] # ..ڹ +N; \u200D。。\u06B9\u200C; [B1 B3 C1 C2 A4_2]; [B1 B3 C1 C2 A4_2] # ..ڹ +B; ..xn--skb; [A4_2]; [A4_2] # ..ڹ +B; xn--1ug..xn--skb080k; [B1 B3 C1 C2 A4_2]; [B1 B3 C1 C2 A4_2] # ..ڹ +B; \u05D00\u0660; [B4]; [B4] # א0٠ +B; xn--0-zhc74b; [B4]; [B4] # א0٠ +B; $; [P1 V6]; [P1 V6] + +# RANDOMIZED TESTS + +B; c.0ü.\u05D0; [B1]; [B1] # c.0ü.א +B; c.0u\u0308.\u05D0; [B1]; [B1] # c.0ü.א +B; C.0U\u0308.\u05D0; [B1]; [B1] # c.0ü.א +B; C.0Ü.\u05D0; [B1]; [B1] # c.0ü.א +B; ⒕∝\u065F򓤦.-󠄯; [P1 V3 V6]; [P1 V3 V6] # ⒕∝ٟ.- +B; 14.∝\u065F򓤦.-󠄯; [P1 V3 V6]; [P1 V3 V6] # 14.∝ٟ.- +B; 14.xn--7hb713l3v90n.-; [V3 V6]; [V3 V6] # 14.∝ٟ.- +B; xn--7hb713lfwbi1311b.-; [V3 V6]; [V3 V6] # ⒕∝ٟ.- +B; ꡣ.\u07CF; ; xn--8c9a.xn--qsb # ꡣ.ߏ +B; xn--8c9a.xn--qsb; ꡣ.\u07CF; xn--8c9a.xn--qsb # ꡣ.ߏ +B; ≯\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- +B; >\u0338\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- +B; ≯\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- +B; >\u0338\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- +B; xn--lfb566l.-; [B1 V3 V6]; [B1 V3 V6] # ≯.- +T; ⾛𐹧⾕.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ +N; ⾛𐹧⾕.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ +T; 走𐹧谷.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ +N; 走𐹧谷.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ +T; 走𐹧谷.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ +N; 走𐹧谷.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ +B; 走𐹧谷.\u115F󠗰ΣႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σႭ +B; 走𐹧谷.\u115F󠗰σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ +B; 走𐹧谷.\u115F󠗰Σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ +B; xn--6g3a1x434z.xn--4xa180eotvh7453a; [B5 V6]; [B5 V6] # 走𐹧谷.σⴍ +B; xn--6g3a1x434z.xn--4xa627dhpae6345i; [B5 V6]; [B5 V6] # 走𐹧谷.σႭ +B; 
xn--6g3a1x434z.xn--3xa380eotvh7453a; [B5 V6]; [B5 V6] # 走𐹧谷.ςⴍ +B; xn--6g3a1x434z.xn--3xa827dhpae6345i; [B5 V6]; [B5 V6] # 走𐹧谷.ςႭ +T; ⾛𐹧⾕.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ +N; ⾛𐹧⾕.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ +B; ⾛𐹧⾕.\u115F󠗰ΣႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σႭ +B; ⾛𐹧⾕.\u115F󠗰σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ +B; ⾛𐹧⾕.\u115F󠗰Σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ +T; \u200D≠ᢙ≯.솣-ᡴႠ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴႠ +N; \u200D≠ᢙ≯.솣-ᡴႠ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴႠ +T; \u200D=\u0338ᢙ>\u0338.솣-ᡴႠ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴႠ +N; \u200D=\u0338ᢙ>\u0338.솣-ᡴႠ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴႠ +T; \u200D=\u0338ᢙ>\u0338.솣-ᡴⴀ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴⴀ +N; \u200D=\u0338ᢙ>\u0338.솣-ᡴⴀ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴⴀ +T; \u200D≠ᢙ≯.솣-ᡴⴀ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴⴀ +N; \u200D≠ᢙ≯.솣-ᡴⴀ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴⴀ +B; xn--jbf911clb.xn----p9j493ivi4l; [V6]; [V6] +B; xn--jbf929a90b0b.xn----p9j493ivi4l; [C2 V6]; [C2 V6] # ≠ᢙ≯.솣-ᡴⴀ +B; xn--jbf911clb.xn----6zg521d196p; [V6]; [V6] +B; xn--jbf929a90b0b.xn----6zg521d196p; [C2 V6]; [C2 V6] # ≠ᢙ≯.솣-ᡴႠ +B; 񯞜.𐿇\u0FA2\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ +B; 񯞜.𐿇\u0FA1\u0FB7\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ +B; 񯞜.𐿇\u0FA1\u0FB7\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ +B; xn--gw68a.xn--ifb57ev2psc6027m; [V6]; [V6] # .ྡྷݽ +B; 𣳔\u0303.𑓂; [V5]; [V5] # 𣳔̃.𑓂 +B; xn--nsa95820a.xn--wz1d; [V5]; [V5] # 𣳔̃.𑓂 +B; 𞤀𞥅񘐱。󠄌Ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𞤢𞥅񘐱。󠄌ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--9d6hgcy3556a.xn--rlju750b; [B2 B3 V6]; [B2 B3 V6] +B; xn--9d6hgcy3556a.xn--7nd0578e; [B2 B3 V6]; [B2 B3 V6] +B; 𞤀𞥅񘐱。󠄌ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +T; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 +N; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 +T; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 +N; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 +B; \u08E2𑁿Σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 +B; \u08E2𑁿Σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 +B; \u08E2𑁿σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 +B; \u08E2𑁿σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 +B; xn--4xa53xp48ys2xc.xn--kn2b; [B1 V6]; [B1 V6] # 𑁿σ𖬱.렧 +B; xn--3xa73xp48ys2xc.xn--kn2b; [B1 V6]; [B1 V6] # 𑁿ς𖬱.렧 +T; -\u200D。𞤍\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𞤯⒈ +N; -\u200D。𞤍\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𞤯⒈ +T; -\u200D。𞤍\u200C\u200D1.; [B1 C1 C2 V3]; [B1 V3] # -.𞤯1. +N; -\u200D。𞤍\u200C\u200D1.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. +T; -\u200D。𞤯\u200C\u200D1.; [B1 C1 C2 V3]; [B1 V3] # -.𞤯1. +N; -\u200D。𞤯\u200C\u200D1.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. +B; -.xn--1-0i8r.; [B1 V3]; [B1 V3] +B; xn----ugn.xn--1-rgnd61297b.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. +T; -\u200D。𞤯\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𞤯⒈ +N; -\u200D。𞤯\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𞤯⒈ +B; -.xn--tsh3666n; [B1 V3 V6]; [B1 V3 V6] +B; xn----ugn.xn--0ugc555aiv51d; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𞤯⒈ +T; \u200C򅎭.Ⴒ𑇀; [C1 P1 V6]; [P1 V6] # .Ⴒ𑇀 +N; \u200C򅎭.Ⴒ𑇀; [C1 P1 V6]; [C1 P1 V6] # .Ⴒ𑇀 +T; \u200C򅎭.ⴒ𑇀; [C1 P1 V6]; [P1 V6] # .ⴒ𑇀 +N; \u200C򅎭.ⴒ𑇀; [C1 P1 V6]; [C1 P1 V6] # .ⴒ𑇀 +B; xn--bn95b.xn--9kj2034e; [V6]; [V6] +B; xn--0ug15083f.xn--9kj2034e; [C1 V6]; [C1 V6] # .ⴒ𑇀 +B; xn--bn95b.xn--qnd6272k; [V6]; [V6] +B; xn--0ug15083f.xn--qnd6272k; [C1 V6]; [C1 V6] # .Ⴒ𑇀 +T; 繱𑖿\u200D.8︒; [P1 V6]; [P1 V6] # 繱𑖿.8︒ +N; 繱𑖿\u200D.8︒; [P1 V6]; [P1 V6] # 繱𑖿.8︒ +T; 繱𑖿\u200D.8。; 繱𑖿\u200D.8.; xn--gl0as212a.8. # 繱𑖿.8. +N; 繱𑖿\u200D.8。; 繱𑖿\u200D.8.; xn--1ug6928ac48e.8. # 繱𑖿.8. +B; xn--gl0as212a.8.; 繱𑖿.8.; xn--gl0as212a.8. +B; 繱𑖿.8.; ; xn--gl0as212a.8. 
+B; xn--1ug6928ac48e.8.; 繱𑖿\u200D.8.; xn--1ug6928ac48e.8. # 繱𑖿.8. +T; 繱𑖿\u200D.8.; ; xn--gl0as212a.8. # 繱𑖿.8. +N; 繱𑖿\u200D.8.; ; xn--1ug6928ac48e.8. # 繱𑖿.8. +B; xn--gl0as212a.xn--8-o89h; [V6]; [V6] +B; xn--1ug6928ac48e.xn--8-o89h; [V6]; [V6] # 繱𑖿.8︒ +B; 󠆾.𞀈; [V5 A4_2]; [V5 A4_2] +B; 󠆾.𞀈; [V5 A4_2]; [V5 A4_2] +B; .xn--ph4h; [V5 A4_2]; [V5 A4_2] +T; ß\u06EB。\u200D; [C2]; xn--ss-59d. # ß۫. +N; ß\u06EB。\u200D; [C2]; [C2] # ß۫. +T; SS\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. +N; SS\u06EB。\u200D; [C2]; [C2] # ss۫. +T; ss\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. +N; ss\u06EB。\u200D; [C2]; [C2] # ss۫. +T; Ss\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. +N; Ss\u06EB。\u200D; [C2]; [C2] # ss۫. +B; xn--ss-59d.; ss\u06EB.; xn--ss-59d. # ss۫. +B; ss\u06EB.; ; xn--ss-59d. # ss۫. +B; SS\u06EB.; ss\u06EB.; xn--ss-59d. # ss۫. +B; Ss\u06EB.; ss\u06EB.; xn--ss-59d. # ss۫. +B; xn--ss-59d.xn--1ug; [C2]; [C2] # ss۫. +B; xn--zca012a.xn--1ug; [C2]; [C2] # ß۫. +T; 󠐵\u200C⒈.󠎇; [C1 P1 V6]; [P1 V6] # ⒈. +N; 󠐵\u200C⒈.󠎇; [C1 P1 V6]; [C1 P1 V6] # ⒈. +T; 󠐵\u200C1..󠎇; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1.. +N; 󠐵\u200C1..󠎇; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1.. +B; xn--1-bs31m..xn--tv36e; [V6 A4_2]; [V6 A4_2] +B; xn--1-rgn37671n..xn--tv36e; [C1 V6 A4_2]; [C1 V6 A4_2] # 1.. +B; xn--tshz2001k.xn--tv36e; [V6]; [V6] +B; xn--0ug88o47900b.xn--tv36e; [C1 V6]; [C1 V6] # ⒈. +T; 󟈣\u065F\uAAB2ß。󌓧; [P1 V6]; [P1 V6] # ٟꪲß. +N; 󟈣\u065F\uAAB2ß。󌓧; [P1 V6]; [P1 V6] # ٟꪲß. +B; 󟈣\u065F\uAAB2SS。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. +B; 󟈣\u065F\uAAB2ss。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. +B; 󟈣\u065F\uAAB2Ss。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. +B; xn--ss-3xd2839nncy1m.xn--bb79d; [V6]; [V6] # ٟꪲss. +B; xn--zca92z0t7n5w96j.xn--bb79d; [V6]; [V6] # ٟꪲß. +T; \u0774\u200C𞤿。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [P1 V6] # ݴ𞤿.䉜 +N; \u0774\u200C𞤿。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ݴ𞤿.䉜 +T; \u0774\u200C𞤝。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [P1 V6] # ݴ𞤿.䉜 +N; \u0774\u200C𞤝。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ݴ𞤿.䉜 +B; xn--4pb2977v.xn--z0nt555ukbnv; [V6]; [V6] # ݴ𞤿.䉜 +B; xn--4pb607jjt73a.xn--1ug236ke314donv1a; [C1 C2 V6]; [C1 C2 V6] # ݴ𞤿.䉜 +T; 򔭜ςᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 +N; 򔭜ςᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 +T; 򔭜ςᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 +N; 򔭜ςᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 +T; 򔭜ςᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +N; 򔭜ςᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +T; 򔭜ςᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +N; 򔭜ςᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +T; 򔭜Σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +N; 򔭜Σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +T; 򔭜Σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +N; 򔭜Σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +T; 򔭜σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +N; 򔭜σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +T; 򔭜σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +N; 򔭜σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +B; xn--1-zmb699meq63t..xn--gdh5392g6sd; [B1 V6 A4_2]; [B1 V6 A4_2] +B; xn--1-zmb699meq63t..xn--1ug85gn777ahze; [B1 V6 A4_2]; [B1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +B; xn--1-xmb999meq63t..xn--1ug85gn777ahze; [B1 V6 A4_2]; [B1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +T; 򔭜Σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +N; 򔭜Σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +T; 򔭜Σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +N; 򔭜Σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # 
σᡱ⒈.≮𑄳𐮍 +T; 򔭜σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +N; 򔭜σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +T; 򔭜σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +N; 򔭜σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +B; xn--4xa207hkzinr77u.xn--gdh5392g6sd; [B1 V6]; [B1 V6] +B; xn--4xa207hkzinr77u.xn--1ug85gn777ahze; [B1 V6]; [B1 V6] # σᡱ⒈.≮𑄳𐮍 +B; xn--3xa407hkzinr77u.xn--1ug85gn777ahze; [B1 V6]; [B1 V6] # ςᡱ⒈.≮𑄳𐮍 +B; \u3164\u094DႠ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्Ⴀ័. +B; \u1160\u094DႠ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्Ⴀ័. +B; \u1160\u094Dⴀ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्ⴀ័. +B; xn--n3b742bkqf4ty.; [V6]; [V6] # ्ⴀ័. +B; xn--n3b468aoqa89r.; [V6]; [V6] # ्Ⴀ័. +B; \u3164\u094Dⴀ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्ⴀ័. +B; xn--n3b445e53po6d.; [V6]; [V6] # ्ⴀ័. +B; xn--n3b468azngju2a.; [V6]; [V6] # ्Ⴀ័. +T; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [V5] # ❣.্𑰽ؒꤩ +N; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ +T; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [V5] # ❣.্𑰽ؒꤩ +N; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ +B; xn--pei.xn--0fb32q3w7q2g4d; [V5]; [V5] # ❣.্𑰽ؒꤩ +B; xn--1ugy10a.xn--0fb32q3w7q2g4d; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ +B; ≮𐳺𐹄.≯񪮸ꡅ; [B1 P1 V6]; [B1 P1 V6] +B; <\u0338𐳺𐹄.>\u0338񪮸ꡅ; [B1 P1 V6]; [B1 P1 V6] +B; xn--gdh7943gk2a.xn--hdh1383c5e36c; [B1 V6]; [B1 V6] +B; \u0CCC𐧅𐳏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; \u0CCC𐧅𐳏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; \u0CCC𐧅𐲏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; xn--7tc6360ky5bn2732c.xn--8tc429c; [B1 V5 V6]; [B1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; \u0CCC𐧅𐲏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; \u0349。𧡫; [V5]; [V5] # ͉.𧡫 +B; xn--nua.xn--bc6k; [V5]; [V5] # ͉.𧡫 +B; 𑰿󠅦.\u1160; [P1 V5 V6]; [P1 V5 V6] # 𑰿. +B; 𑰿󠅦.\u1160; [P1 V5 V6]; [P1 V5 V6] # 𑰿. +B; xn--ok3d.xn--psd; [V5 V6]; [V5 V6] # 𑰿. +T; -𞤆\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -𞤨. +N; -𞤆\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 C2 P1 V3 V6] # -𞤨. +T; -𞤨\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -𞤨. +N; -𞤨\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 C2 P1 V3 V6] # -𞤨. +B; xn----ni8r.xn--846h96596c; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----ugnx367r.xn--846h96596c; [B1 B5 B6 C2 V3 V6]; [B1 B5 B6 C2 V3 V6] # -𞤨. 
+B; ꡏ󠇶≯𳾽。\u1DFD⾇滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; ꡏ󠇶>\u0338𳾽。\u1DFD⾇滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; ꡏ󠇶≯𳾽。\u1DFD舛滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; ꡏ󠇶>\u0338𳾽。\u1DFD舛滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; xn--hdh7483cu6twwki8e.xn--yfg0765a58l0n6k; [B1 V5 V6]; [B1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; 蔏。𑰺; [V5]; [V5] +B; 蔏。𑰺; [V5]; [V5] +B; xn--uy1a.xn--jk3d; [V5]; [V5] +B; 𝟿𐮋。󠄊; [B1]; [B1] +B; 9𐮋。󠄊; [B1]; [B1] +B; xn--9-rv5i.; [B1]; [B1] +B; 󟇇-䟖F。\u07CB⒈\u0662; [B4 P1 V6]; [B4 P1 V6] # -䟖f.ߋ⒈٢ +B; 󟇇-䟖F。\u07CB1.\u0662; [B1 P1 V6]; [B1 P1 V6] # -䟖f.ߋ1.٢ +B; 󟇇-䟖f。\u07CB1.\u0662; [B1 P1 V6]; [B1 P1 V6] # -䟖f.ߋ1.٢ +B; xn---f-mz8b08788k.xn--1-ybd.xn--bib; [B1 V6]; [B1 V6] # -䟖f.ߋ1.٢ +B; 󟇇-䟖f。\u07CB⒈\u0662; [B4 P1 V6]; [B4 P1 V6] # -䟖f.ߋ⒈٢ +B; xn---f-mz8b08788k.xn--bib53ev44d; [B4 V6]; [B4 V6] # -䟖f.ߋ⒈٢ +T; \u200C。𐹺; [B1 C1]; [B1 A4_2] # .𐹺 +N; \u200C。𐹺; [B1 C1]; [B1 C1] # .𐹺 +T; \u200C。𐹺; [B1 C1]; [B1 A4_2] # .𐹺 +N; \u200C。𐹺; [B1 C1]; [B1 C1] # .𐹺 +B; .xn--yo0d; [B1 A4_2]; [B1 A4_2] +B; xn--0ug.xn--yo0d; [B1 C1]; [B1 C1] # .𐹺 +T; 𐡆.≯\u200C-𞥀; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 +N; 𐡆.≯\u200C-𞥀; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 +T; 𐡆.>\u0338\u200C-𞥀; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 +N; 𐡆.>\u0338\u200C-𞥀; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 +T; 𐡆.>\u0338\u200C-𞤞; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 +N; 𐡆.>\u0338\u200C-𞤞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 +T; 𐡆.≯\u200C-𞤞; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 +N; 𐡆.≯\u200C-𞤞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 +B; xn--le9c.xn----ogo9956r; [B1 V6]; [B1 V6] +B; xn--le9c.xn----rgn40iy359e; [B1 C1 V6]; [B1 C1 V6] # 𐡆.≯-𞥀 +B; 󠁀-。≠\uFCD7; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج +B; 󠁀-。=\u0338\uFCD7; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج +B; 󠁀-。≠\u0647\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج +B; 󠁀-。=\u0338\u0647\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج +B; xn----f411m.xn--rgb7c611j; [B1 V3 V6]; [B1 V3 V6] # -.≠هج +T; 񻬹𑈵。\u200D𞨶; [B1 C2 P1 V6]; [P1 V6] # 𑈵. +N; 񻬹𑈵。\u200D𞨶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𑈵. +B; xn--8g1d12120a.xn--5l6h; [V6]; [V6] +B; xn--8g1d12120a.xn--1ug6651p; [B1 C2 V6]; [B1 C2 V6] # 𑈵. +B; 𑋧\uA9C02。㧉򒖄; [P1 V5 V6]; [P1 V5 V6] # 𑋧꧀2.㧉 +B; 𑋧\uA9C02。㧉򒖄; [P1 V5 V6]; [P1 V5 V6] # 𑋧꧀2.㧉 +B; xn--2-5z4eu89y.xn--97l02706d; [V5 V6]; [V5 V6] # 𑋧꧀2.㧉 +T; \u200C𽬄𐹴𞩥。≯6; [B1 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹴.≯6 +N; \u200C𽬄𐹴𞩥。≯6; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹴.≯6 +T; \u200C𽬄𐹴𞩥。>\u03386; [B1 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹴.≯6 +N; \u200C𽬄𐹴𞩥。>\u03386; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹴.≯6 +B; xn--so0du768aim9m.xn--6-ogo; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; xn--0ug7105gf5wfxepq.xn--6-ogo; [B1 C1 V6]; [B1 C1 V6] # 𐹴.≯6 +T; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 P1 V3 V5 V6] # 𑁿.𐹦- +N; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𑁿.𐹦- +T; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 P1 V3 V5 V6] # 𑁿.𐹦- +N; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𑁿.𐹦- +B; xn--q30d.xn----i26i1299n; [B1 B3 B6 V3 V5 V6]; [B1 B3 B6 V3 V5 V6] +B; xn--q30d.xn----ugn1088hfsxv; [B1 B3 B6 C2 V5 V6]; [B1 B3 B6 C2 V5 V6] # 𑁿.𐹦- +T; ⤸ς𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸ς. +N; ⤸ς𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸ς. +T; ⤸ς𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸ς. +N; ⤸ς𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸ς. +B; ⤸Σ𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸σ. +B; ⤸σ𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸σ. +B; xn--4xa192qmp03d.xn--psd; [V6]; [V6] # ⤸σ. +B; xn--3xa392qmp03d.xn--psd; [V6]; [V6] # ⤸ς. +B; ⤸Σ𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸σ. +B; ⤸σ𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸σ. +B; xn--4xa192qmp03d.xn--cl7c; [V6]; [V6] # ⤸σ. 
+B; xn--3xa392qmp03d.xn--cl7c; [V6]; [V6] # ⤸ς. +B; \u0765\u1035𐫔\u06D5.𐦬𑋪Ⴃ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ݥဵ𐫔ە.𐦬𑋪Ⴃ +B; \u0765\u1035𐫔\u06D5.𐦬𑋪ⴃ; [B2 B3]; [B2 B3] # ݥဵ𐫔ە.𐦬𑋪ⴃ +B; xn--llb10as9tqp5y.xn--ukj7371e21f; [B2 B3]; [B2 B3] # ݥဵ𐫔ە.𐦬𑋪ⴃ +B; xn--llb10as9tqp5y.xn--bnd9168j21f; [B2 B3 V6]; [B2 B3 V6] # ݥဵ𐫔ە.𐦬𑋪Ⴃ +B; \u0661\u1B44-킼.\u1BAA\u0616\u066C≯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ١᭄-킼.᮪ؖ٬≯ +B; \u0661\u1B44-킼.\u1BAA\u0616\u066C>\u0338; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ١᭄-킼.᮪ؖ٬≯ +B; xn----9pc551nk39n.xn--4fb6o571degg; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ١᭄-킼.᮪ؖ٬≯ +B; -。\u06C2\u0604򅖡𑓂; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -.ۂ𑓂 +B; -。\u06C1\u0654\u0604򅖡𑓂; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -.ۂ𑓂 +B; -.xn--mfb39a7208dzgs3d; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # -.ۂ𑓂 +T; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [P1 V5 V6] # .ֽꡝ𐋡 +N; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .ֽꡝ𐋡 +T; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [P1 V5 V6] # .ֽꡝ𐋡 +N; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .ֽꡝ𐋡 +B; xn--b726ey18m.xn--ldb8734fg0qcyzzg; [V5 V6]; [V5 V6] # .ֽꡝ𐋡 +B; xn--1ug66101lt8me.xn--ldb8734fg0qcyzzg; [C2 V5 V6]; [C2 V5 V6] # .ֽꡝ𐋡 +T; ︒􃈵ς񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] +N; ︒􃈵ς񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] +T; 。􃈵ς񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] +N; 。􃈵ς񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。􃈵Σ񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。􃈵σ񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; .xn--4xa68573c7n64d.xn--f29c; [V6 A4_2]; [V6 A4_2] +B; .xn--3xa88573c7n64d.xn--f29c; [V6 A4_2]; [V6 A4_2] +B; ︒􃈵Σ񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] +B; ︒􃈵σ񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] +B; xn--4xa1729jwz5t7gl5f.xn--f29c; [B1 V6]; [B1 V6] +B; xn--3xa3729jwz5t7gl5f.xn--f29c; [B1 V6]; [B1 V6] +B; \u07D9.\u06EE󆾃≯󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ +B; \u07D9.\u06EE󆾃>\u0338󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ +B; \u07D9.\u06EE󆾃≯󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ +B; \u07D9.\u06EE󆾃>\u0338󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ +B; xn--0sb.xn--bmb691l0524t; [B2 B3 V6]; [B2 B3 V6] # ߙ.ۮ≯ +B; \u1A73󚙸.𐭍; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᩳ.𐭍 +B; xn--2of22352n.xn--q09c; [B1 V5 V6]; [B1 V5 V6] # ᩳ.𐭍 +B; ⒉󠊓≠。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] +B; ⒉󠊓=\u0338。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] +B; 2.󠊓≠。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] +B; 2.󠊓=\u0338。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] +B; 2.󠊓=\u0338。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] +B; 2.󠊓≠。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] +B; 2.xn--1chz4101l.xn--45iz7d6b; [V6]; [V6] +B; 2.xn--1chz4101l.xn--gnd9b297j; [V6]; [V6] +B; ⒉󠊓=\u0338。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] +B; ⒉󠊓≠。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] +B; xn--1ch07f91401d.xn--45iz7d6b; [V6]; [V6] +B; xn--1ch07f91401d.xn--gnd9b297j; [V6]; [V6] +B; -󠉱\u0FB8Ⴥ。-𐹽\u0774𞣑; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ྸჅ.-𐹽ݴ𞣑 +B; -󠉱\u0FB8ⴥ。-𐹽\u0774𞣑; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ྸⴥ.-𐹽ݴ𞣑 +B; xn----xmg317tgv352a.xn----05c4213ryr0g; [B1 V3 V6]; [B1 V3 V6] # -ྸⴥ.-𐹽ݴ𞣑 +B; xn----xmg12fm2555h.xn----05c4213ryr0g; [B1 V3 V6]; [B1 V3 V6] # -ྸჅ.-𐹽ݴ𞣑 +B; \u0659。𑄴︒\u0627\u07DD; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٙ.𑄴︒اߝ +B; \u0659。𑄴。\u0627\u07DD; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٙ.𑄴.اߝ +B; xn--1hb.xn--w80d.xn--mgb09f; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٙ.𑄴.اߝ +B; xn--1hb.xn--mgb09fp820c08pa; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ٙ.𑄴︒اߝ +T; Ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # Ⴙظ. +N; Ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # Ⴙظ. +T; ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2]; [B5 B6] # ⴙظ. +N; ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2]; [B1 B5 B6 C2] # ⴙظ. +B; xn--3gb910r.; [B5 B6]; [B5 B6] # ⴙظ. +B; xn--3gb910r.xn--1ug; [B1 B5 B6 C2]; [B1 B5 B6 C2] # ⴙظ. +B; xn--3gb194c.; [B5 B6 V6]; [B5 B6 V6] # Ⴙظ. 
+B; xn--3gb194c.xn--1ug; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # Ⴙظ. +B; 󠆸。₆0𐺧\u0756; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # .60ݖ +B; 󠆸。60𐺧\u0756; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # .60ݖ +B; .xn--60-cke9470y; [B1 V6 A4_2]; [B1 V6 A4_2] # .60ݖ +B; 6\u084F。-𑈴; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 +B; 6\u084F。-𑈴; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 +B; xn--6-jjd.xn----6n8i; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 +T; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્ςࣖ +N; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્ςࣖ +T; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્ςࣖ +N; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્ςࣖ +T; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ +N; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ +T; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ +N; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ +B; xn--oo0d1330n.xn--4xa21xcwbfz15g; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 𐹰.્σࣖ +B; xn--1ugx105gq26y.xn--4xa21xcwbfz15g; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹰.્σࣖ +B; xn--1ugx105gq26y.xn--3xa41xcwbfz15g; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹰.્ςࣖ +T; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ +N; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ +T; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ +N; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ +B; ⒈񟄜Ⴓ⒪.\u0DCA򘘶\u088B𐹢; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈Ⴓ⒪.්𐹢 +B; 1.񟄜Ⴓ(o).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 +B; 1.񟄜ⴓ(o).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.ⴓ(o).්𐹢 +B; 1.񟄜Ⴓ(O).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 +B; 1.xn--(o)-7sn88849j.xn--3xb99xpx1yoes3e; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 +B; 1.xn--(o)-ej1bu5389e.xn--3xb99xpx1yoes3e; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.ⴓ(o).්𐹢 +B; ⒈񟄜ⴓ⒪.\u0DCA򘘶\u088B𐹢; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈ⴓ⒪.්𐹢 +B; xn--tsh0ds63atl31n.xn--3xb99xpx1yoes3e; [B1 V5 V6]; [B1 V5 V6] # ⒈ⴓ⒪.්𐹢 +B; xn--rnd762h7cx3027d.xn--3xb99xpx1yoes3e; [B1 V5 V6]; [B1 V5 V6] # ⒈Ⴓ⒪.්𐹢 +B; 𞤷.𐮐𞢁𐹠\u0624; ; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; 𞤷.𐮐𞢁𐹠\u0648\u0654; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; 𞤕.𐮐𞢁𐹠\u0648\u0654; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; 𞤕.𐮐𞢁𐹠\u0624; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; xn--ve6h.xn--jgb1694kz0b2176a; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; 𐲈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; 𐲈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; 𐳈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; xn----ue6i.xn--v80d6662t; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] +B; 𐳈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; -󠉖ꡧ.󠊂񇆃🄉; [P1 V3 V6]; [P1 V3 V6] +B; -󠉖ꡧ.󠊂񇆃8,; [P1 V3 V6]; [P1 V3 V6] +B; xn----hg4ei0361g.xn--8,-k362evu488a; [P1 V3 V6]; [P1 V3 V6] +B; xn----hg4ei0361g.xn--207ht163h7m94c; [V3 V6]; [V3 V6] +B; 󠾛󠈴臯𧔤.\u0768𝟝; [B1 P1 V6]; [B1 P1 V6] # 臯𧔤.ݨ5 +B; 󠾛󠈴臯𧔤.\u07685; [B1 P1 V6]; [B1 P1 V6] # 臯𧔤.ݨ5 +B; xn--zb1at733hm579ddhla.xn--5-b5c; [B1 V6]; [B1 V6] # 臯𧔤.ݨ5 +B; ≮𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] +B; <\u0338𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] +B; ≮𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] +B; <\u0338𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] +B; xn--gdh1504g.xn--e92h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] +B; 𐹯ᯛ\u0A4D。脥; [B1]; [B1] # 𐹯ᯛ੍.脥 +B; 𐹯ᯛ\u0A4D。脥; [B1]; [B1] # 𐹯ᯛ੍.脥 +B; 
xn--ybc101g3m1p.xn--740a; [B1]; [B1] # 𐹯ᯛ੍.脥 +B; \u1B44\u115F𞷿򃀍.-; [B1 B5 P1 V3 V5 V6]; [B1 B5 P1 V3 V5 V6] # ᭄.- +B; xn--osd971cpx70btgt8b.-; [B1 B5 V3 V5 V6]; [B1 B5 V3 V5 V6] # ᭄.- +T; \u200C。\u0354; [C1 V5]; [V5 A4_2] # .͔ +N; \u200C。\u0354; [C1 V5]; [C1 V5] # .͔ +T; \u200C。\u0354; [C1 V5]; [V5 A4_2] # .͔ +N; \u200C。\u0354; [C1 V5]; [C1 V5] # .͔ +B; .xn--yua; [V5 A4_2]; [V5 A4_2] # .͔ +B; xn--0ug.xn--yua; [C1 V5]; [C1 V5] # .͔ +B; 𞤥󠅮.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤥󠅮.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤥󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; 𞤃󠅮.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤃󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; xn--de6h.xn--37e857h; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; 𞤥.ᡄⴎ; ; xn--de6h.xn--37e857h +B; 𞤃.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤃.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; xn--de6h.xn--mnd799a; [V6]; [V6] +B; 𞤥󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; 𞤃󠅮.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤃󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; 𞤥.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤧𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤧𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤧𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤅𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤅𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; xn--zxa5691vboja.xn--bfi293ci119b; [B2 B3 B6]; [B2 B3 B6] +B; 𞤧𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤅𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤅𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +T; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +N; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +T; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +N; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +T; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. +N; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. +T; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. +N; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. +T; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅. +N; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅. +T; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅. +N; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅. +B; xn----e3j6620g.xn--jlju661e.; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----e3j425bsk1o.xn--jlju661e.; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.ⴛ𐦅. +B; xn----e3j6620g.xn--znd4948j.; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----e3j425bsk1o.xn--znd4948j.; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.Ⴛ𐦅. 
+T; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +N; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +T; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +N; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +B; xn----e3j6620g.xn--jlj4997dhgh; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----e3j425bsk1o.xn--jlj4997dhgh; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +B; xn----e3j6620g.xn--znd2362jhgh; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----e3j425bsk1o.xn--znd2362jhgh; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +T; 󠾳.︒⥱\u200C𐹬; [B1 C1 P1 V6]; [B1 P1 V6] # .︒⥱𐹬 +N; 󠾳.︒⥱\u200C𐹬; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .︒⥱𐹬 +T; 󠾳.。⥱\u200C𐹬; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ..⥱𐹬 +N; 󠾳.。⥱\u200C𐹬; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ..⥱𐹬 +B; xn--uf66e..xn--qti2829e; [B1 V6 A4_2]; [B1 V6 A4_2] +B; xn--uf66e..xn--0ugz28as66q; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ..⥱𐹬 +B; xn--uf66e.xn--qtiz073e3ik; [B1 V6]; [B1 V6] +B; xn--uf66e.xn--0ugz28axl3pqxna; [B1 C1 V6]; [B1 C1 V6] # .︒⥱𐹬 +B; 𐯖.𐹠Ⴑ񚇜𐫊; [B1 P1 V6]; [B1 P1 V6] +B; 𐯖.𐹠ⴑ񚇜𐫊; [B1 P1 V6]; [B1 P1 V6] +B; xn--n49c.xn--8kj8702ewicl862o; [B1 V6]; [B1 V6] +B; xn--n49c.xn--pnd4619jwicl862o; [B1 V6]; [B1 V6] +B; \u0FA4񱤯.𝟭Ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1Ⴛ +B; \u0FA4񱤯.1Ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1Ⴛ +B; \u0FA4񱤯.1ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1ⴛ +B; xn--0fd40533g.xn--1-tws; [V5 V6]; [V5 V6] # ྤ.1ⴛ +B; xn--0fd40533g.xn--1-q1g; [V5 V6]; [V5 V6] # ྤ.1Ⴛ +B; \u0FA4񱤯.𝟭ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1ⴛ +B; -\u0826齀。릿𐸋; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -ࠦ齀.릿 +B; -\u0826齀。릿𐸋; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -ࠦ齀.릿 +B; xn----6gd0617i.xn--7y2bm55m; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # -ࠦ齀.릿 +T; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 P1 V6] # ܜ鹝꾗.⏃ +N; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ܜ鹝꾗.⏃ +T; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 P1 V6] # ܜ鹝꾗.⏃ +N; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ܜ鹝꾗.⏃ +B; xn--mnb6558e91kyq533a.xn--6mh27269e; [B1 B6 V6]; [B1 B6 V6] # ܜ鹝꾗.⏃ +B; xn--mnb6558e91kyq533a.xn--1uga46zs309y; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ܜ鹝꾗.⏃ +B; ≮.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- +B; <\u0338.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- +B; ≮.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- +B; <\u0338.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- +B; xn--gdh.xn------eqf; [B1 V2 V3 V6]; [B1 V2 V3 V6] # ≮.-܈-- +T; 𐹸󠋳。\u200Dς𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.ς7 +N; 𐹸󠋳。\u200Dς𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.ς7 +T; 𐹸󠋳。\u200Dς7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.ς7 +N; 𐹸󠋳。\u200Dς7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.ς7 +T; 𐹸󠋳。\u200DΣ7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 +N; 𐹸󠋳。\u200DΣ7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 +T; 𐹸󠋳。\u200Dσ7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 +N; 𐹸󠋳。\u200Dσ7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 +B; xn--wo0di5177c.xn--7-zmb; [B1 V6]; [B1 V6] +B; xn--wo0di5177c.xn--7-zmb938s; [B1 C2 V6]; [B1 C2 V6] # 𐹸.σ7 +B; xn--wo0di5177c.xn--7-xmb248s; [B1 C2 V6]; [B1 C2 V6] # 𐹸.ς7 +T; 𐹸󠋳。\u200DΣ𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 +N; 𐹸󠋳。\u200DΣ𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 +T; 𐹸󠋳。\u200Dσ𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 +N; 𐹸󠋳。\u200Dσ𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 +T; ς򅜌8.𞭤; [P1 V6]; [P1 V6] +N; ς򅜌8.𞭤; [P1 V6]; [P1 V6] +T; ς򅜌8.𞭤; [P1 V6]; [P1 V6] +N; ς򅜌8.𞭤; [P1 V6]; [P1 V6] +B; Σ򅜌8.𞭤; [P1 V6]; [P1 V6] +B; σ򅜌8.𞭤; [P1 V6]; [P1 V6] +B; xn--8-zmb14974n.xn--su6h; [V6]; [V6] +B; xn--8-xmb44974n.xn--su6h; [V6]; 
[V6] +B; Σ򅜌8.𞭤; [P1 V6]; [P1 V6] +B; σ򅜌8.𞭤; [P1 V6]; [P1 V6] +T; \u200Cᡑ🄀\u0684.-𐫄𑲤; [B1 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 +N; \u200Cᡑ🄀\u0684.-𐫄𑲤; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 +T; \u200Cᡑ0.\u0684.-𐫄𑲤; [B1 C1 V3]; [B1 V3] # ᡑ0.ڄ.-𐫄𑲤 +N; \u200Cᡑ0.\u0684.-𐫄𑲤; [B1 C1 V3]; [B1 C1 V3] # ᡑ0.ڄ.-𐫄𑲤 +B; xn--0-o7j.xn--9ib.xn----ek5i065b; [B1 V3]; [B1 V3] # ᡑ0.ڄ.-𐫄𑲤 +B; xn--0-o7j263b.xn--9ib.xn----ek5i065b; [B1 C1 V3]; [B1 C1 V3] # ᡑ0.ڄ.-𐫄𑲤 +B; xn--9ib722gbw95a.xn----ek5i065b; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 +B; xn--9ib722gvtfi563c.xn----ek5i065b; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 +B; 𖠍。𐪿넯򞵲; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𖠍。𐪿넯򞵲; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--4e9e.xn--l60bj21opd57g; [B2 B3 V6]; [B2 B3 V6] +B; ᠇Ⴘ。\u0603Ⴈ𝆊; [B1 P1 V6]; [B1 P1 V6] # ᠇Ⴘ.Ⴈ𝆊 +B; ᠇ⴘ。\u0603ⴈ𝆊; [B1 P1 V6]; [B1 P1 V6] # ᠇ⴘ.ⴈ𝆊 +B; xn--d6e009h.xn--lfb290rfu3z; [B1 V6]; [B1 V6] # ᠇ⴘ.ⴈ𝆊 +B; xn--wnd558a.xn--lfb465c1v87a; [B1 V6]; [B1 V6] # ᠇Ⴘ.Ⴈ𝆊 +B; ⒚󠋑𞤰。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧Ⴜᣥ +B; 19.󠋑𞤰。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧Ⴜᣥ +B; 19.󠋑𞤰。牣\u0667ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧ⴜᣥ +B; 19.󠋑𞤎。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧Ⴜᣥ +B; 19.xn--oe6h75760c.xn--gib404ccxgh00h; [B1 B5 V6]; [B1 B5 V6] # 19.𞤰.牣٧Ⴜᣥ +B; 19.xn--oe6h75760c.xn--gib285gtxo2l9d; [B1 B5 V6]; [B1 B5 V6] # 19.𞤰.牣٧ⴜᣥ +B; ⒚󠋑𞤰。牣\u0667ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧ⴜᣥ +B; ⒚󠋑𞤎。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧Ⴜᣥ +B; xn--cthy466n29j3e.xn--gib404ccxgh00h; [B1 B5 V6]; [B1 B5 V6] # ⒚𞤰.牣٧Ⴜᣥ +B; xn--cthy466n29j3e.xn--gib285gtxo2l9d; [B1 B5 V6]; [B1 B5 V6] # ⒚𞤰.牣٧ⴜᣥ +B; -𐋱𐰽⒈.Ⴓ; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -𐋱𐰽1..Ⴓ; [B1 P1 V3 V6 A4_2]; [B1 P1 V3 V6 A4_2] +B; -𐋱𐰽1..ⴓ; [B1 V3 A4_2]; [B1 V3 A4_2] +B; xn---1-895nq11a..xn--blj; [B1 V3 A4_2]; [B1 V3 A4_2] +B; xn---1-895nq11a..xn--rnd; [B1 V3 V6 A4_2]; [B1 V3 V6 A4_2] +B; -𐋱𐰽⒈.ⴓ; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn----ecp0206g90h.xn--blj; [B1 V3 V6]; [B1 V3 V6] +B; xn----ecp0206g90h.xn--rnd; [B1 V3 V6]; [B1 V3 V6] +T; \u200C긃.榶-; [C1 V3]; [V3] # 긃.榶- +N; \u200C긃.榶-; [C1 V3]; [C1 V3] # 긃.榶- +T; \u200C긃.榶-; [C1 V3]; [V3] # 긃.榶- +N; \u200C긃.榶-; [C1 V3]; [C1 V3] # 긃.榶- +B; xn--ej0b.xn----d87b; [V3]; [V3] +B; xn--0ug3307c.xn----d87b; [C1 V3]; [C1 V3] # 긃.榶- +T; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ +N; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ +T; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ +N; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ +B; xn--lwwp69lqs7m.xn--b7b; [V5 V6]; [V5 V6] # 뉓泓.্ +B; xn--lwwp69lqs7m.xn--b7b605i; [V5 V6]; [V5 V6] # 뉓泓.্ +T; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ß.ິ +N; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ß.ິ +T; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ß.ິ +N; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ß.ິ +T; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +T; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +T; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +B; xn--ss-ti3o.xn--57c638l8774i; [B1 V5 V6]; [B1 V5 V6] # 𐹴ss.ິ +B; xn--ss-l1t5169j.xn--57c638l8774i; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹴ss.ິ +B; xn--zca770nip7n.xn--57c638l8774i; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹴ß.ິ +T; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; 
[B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +T; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +T; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +B; \u1B44.\u1BAA-≮≠; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ +B; \u1B44.\u1BAA-<\u0338=\u0338; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ +B; \u1B44.\u1BAA-≮≠; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ +B; \u1B44.\u1BAA-<\u0338=\u0338; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ +B; xn--1uf.xn----nmlz65aub; [V5 V6]; [V5 V6] # ᭄.᮪-≮≠ +B; \u1BF3Ⴑ\u115F.𑄴Ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴Ⅎ +B; \u1BF3Ⴑ\u115F.𑄴Ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴Ⅎ +B; \u1BF3ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳ⴑ.𑄴ⅎ +B; \u1BF3Ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴ⅎ +B; xn--pnd26a55x.xn--73g3065g; [V5 V6]; [V5 V6] # ᯳Ⴑ.𑄴ⅎ +B; xn--osd925cvyn.xn--73g3065g; [V5 V6]; [V5 V6] # ᯳ⴑ.𑄴ⅎ +B; xn--pnd26a55x.xn--f3g7465g; [V5 V6]; [V5 V6] # ᯳Ⴑ.𑄴Ⅎ +B; \u1BF3ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳ⴑ.𑄴ⅎ +B; \u1BF3Ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴ⅎ +B; 𜉆。Ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] +B; 𜉆。Ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] +B; 𜉆。ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] +B; 𜉆。ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] +B; xn--187g.xn--ukjy205b8rscdeb; [B5 V6]; [B5 V6] +B; xn--187g.xn--bnd4785f8r8bdeb; [B5 V6]; [B5 V6] +B; 𐫀。⳻󠙾󠄷\u3164; [B1 P1 V6]; [B1 P1 V6] # 𐫀.⳻ +B; 𐫀。⳻󠙾󠄷\u1160; [B1 P1 V6]; [B1 P1 V6] # 𐫀.⳻ +B; xn--pw9c.xn--psd742lxt32w; [B1 V6]; [B1 V6] # 𐫀.⳻ +B; xn--pw9c.xn--mkj83l4v899a; [B1 V6]; [B1 V6] # 𐫀.⳻ +B; \u079A⾇.\u071E-𐋰; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 +B; \u079A舛.\u071E-𐋰; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 +B; xn--7qb6383d.xn----20c3154q; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 +B; Ⴉ猕󹛫≮.︒; [P1 V6]; [P1 V6] +B; Ⴉ猕󹛫<\u0338.︒; [P1 V6]; [P1 V6] +B; Ⴉ猕󹛫≮.。; [P1 V6 A4_2]; [P1 V6 A4_2] +B; Ⴉ猕󹛫<\u0338.。; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ⴉ猕󹛫<\u0338.。; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ⴉ猕󹛫≮.。; [P1 V6 A4_2]; [P1 V6 A4_2] +B; xn--gdh892bbz0d5438s..; [V6 A4_2]; [V6 A4_2] +B; xn--hnd212gz32d54x5r..; [V6 A4_2]; [V6 A4_2] +B; ⴉ猕󹛫<\u0338.︒; [P1 V6]; [P1 V6] +B; ⴉ猕󹛫≮.︒; [P1 V6]; [P1 V6] +B; xn--gdh892bbz0d5438s.xn--y86c; [V6]; [V6] +B; xn--hnd212gz32d54x5r.xn--y86c; [V6]; [V6] +B; 🏮。\u062B鳳\u07E2󠅉; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ +B; 🏮。\u062B鳳\u07E2󠅉; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ +B; xn--8m8h.xn--qgb29f6z90a; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ +T; \u200D𐹶。ß; [B1 C2]; [B1] # 𐹶.ß +N; \u200D𐹶。ß; [B1 C2]; [B1 C2] # 𐹶.ß +T; \u200D𐹶。SS; [B1 C2]; [B1] # 𐹶.ss +N; \u200D𐹶。SS; [B1 C2]; [B1 C2] # 𐹶.ss +T; \u200D𐹶。ss; [B1 C2]; [B1] # 𐹶.ss +N; \u200D𐹶。ss; [B1 C2]; [B1 C2] # 𐹶.ss +T; \u200D𐹶。Ss; [B1 C2]; [B1] # 𐹶.ss +N; \u200D𐹶。Ss; [B1 C2]; [B1 C2] # 𐹶.ss +B; xn--uo0d.ss; [B1]; [B1] +B; xn--1ug9105g.ss; [B1 C2]; [B1 C2] # 𐹶.ss +B; xn--1ug9105g.xn--zca; [B1 C2]; [B1 C2] # 𐹶.ß +T; Å둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; Å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; A\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; A\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; Å둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; Å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; A\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; A\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; a\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; a\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; å둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +B; xn----1fa1788k.; [V3]; [V3] +B; xn----1fa1788k.xn--0ug; [C1 V3]; [C1 V3] # å둄-. +T; a\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; a\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; å둄-.\u200C; [C1 V3]; [V3] # å둄-. 
+N; å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +B; \u3099򬎑\u1DD7𞤀.򱲢-\u0953; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # ゙ᷗ𞤢.-॓ +B; \u3099򬎑\u1DD7𞤢.򱲢-\u0953; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # ゙ᷗ𞤢.-॓ +B; xn--veg121fwg63altj9d.xn----eyd92688s; [B1 B6 V5 V6]; [B1 B6 V5 V6] # ゙ᷗ𞤢.-॓ +T; ς.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ς.ß⵿ +N; ς.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ς.ß⵿ +B; Σ.SS񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ +B; σ.ss񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ +B; Σ.ss񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ +B; xn--4xa.xn--ss-y8d4760biv60n; [B5 B6 V6]; [B5 B6 V6] # σ.ss⵿ +T; Σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ +N; Σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ +T; σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ +N; σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ +B; xn--4xa.xn--zca281az71b8x73m; [B5 B6 V6]; [B5 B6 V6] # σ.ß⵿ +B; xn--3xa.xn--zca281az71b8x73m; [B5 B6 V6]; [B5 B6 V6] # ς.ß⵿ +B; ꡀ𞀟。\u066B\u0599; [B1]; [B1] # ꡀ𞀟.٫֙ +B; ꡀ𞀟。\u066B\u0599; [B1]; [B1] # ꡀ𞀟.٫֙ +B; xn--8b9a1720d.xn--kcb33b; [B1]; [B1] # ꡀ𞀟.٫֙ +T; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # ࢩ.⧅-𐭡 +N; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ࢩ.⧅-𐭡 +T; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # ࢩ.⧅-𐭡 +N; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ࢩ.⧅-𐭡 +B; xn--yyb56242i.xn----zir1232guu71b; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ࢩ.⧅-𐭡 +B; xn--yyb780jll63m.xn----zir1232guu71b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # ࢩ.⧅-𐭡 +T; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖︒ +N; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖︒ +T; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖︒ +N; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖︒ +T; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖. +N; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖. +T; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖. +N; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖. +B; xn--ct2b0738h.xn--772h.; [V5 V6]; [V5 V6] +B; xn--0ugb3358ili2v.xn--772h.; [C1 C2 V5 V6]; [C1 C2 V5 V6] # 룱.𝨖. 
+B; xn--ct2b0738h.xn--y86cl899a; [V5 V6]; [V5 V6] +B; xn--0ugb3358ili2v.xn--y86cl899a; [C1 C2 V5 V6]; [C1 C2 V5 V6] # 룱.𝨖︒ +T; 🄄.\u1CDC⒈ß; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ß +N; 🄄.\u1CDC⒈ß; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ß +T; 3,.\u1CDC1.ß; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß +N; 3,.\u1CDC1.ß; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß +B; 3,.\u1CDC1.SS; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss +B; 3,.\u1CDC1.ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss +B; 3,.\u1CDC1.Ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss +B; 3,.xn--1-43l.ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss +B; 3,.xn--1-43l.xn--zca; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß +B; 🄄.\u1CDC⒈SS; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss +B; 🄄.\u1CDC⒈ss; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss +B; 🄄.\u1CDC⒈Ss; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss +B; xn--x07h.xn--ss-k1r094b; [V5 V6]; [V5 V6] # 🄄.᳜⒈ss +B; xn--x07h.xn--zca344lmif; [V5 V6]; [V5 V6] # 🄄.᳜⒈ß +B; 񇌍\u2D7F。𞼓򡄨𑐺; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⵿.𑐺 +B; 񇌍\u2D7F。𞼓򡄨𑐺; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⵿.𑐺 +B; xn--eoj16016a.xn--0v1d3848a3lr0d; [B2 B3 V6]; [B2 B3 V6] # ⵿.𑐺 +T; \u1DFD\u103A\u094D.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u1DFD\u103A\u094D.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +T; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +T; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +T; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +T; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +B; xn--n3b956a9zm.xn--1ch912d; [V5 V6]; [V5 V6] # ်्᷽.≠㇛ +B; xn--n3b956a9zm.xn--1ug63gz5w; [C2 V5 V6]; [C2 V5 V6] # ်्᷽.≠㇛ +T; Ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2 P1 V6]; [B1 P1 V5 V6] # Ⴁ𐋨娤.̼٢𑖿 +N; Ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴁ𐋨娤.̼٢𑖿 +T; ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2]; [B1 V5] # ⴁ𐋨娤.̼٢𑖿 +N; ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2]; [B1 C2] # ⴁ𐋨娤.̼٢𑖿 +B; xn--skjw75lg29h.xn--9ta62nrv36a; [B1 V5]; [B1 V5] # ⴁ𐋨娤.̼٢𑖿 +B; xn--skjw75lg29h.xn--9ta62ngt6aou8t; [B1 C2]; [B1 C2] # ⴁ𐋨娤.̼٢𑖿 +B; xn--8md2578ag21g.xn--9ta62nrv36a; [B1 V5 V6]; [B1 V5 V6] # Ⴁ𐋨娤.̼٢𑖿 +B; xn--8md2578ag21g.xn--9ta62ngt6aou8t; [B1 C2 V6]; [B1 C2 V6] # Ⴁ𐋨娤.̼٢𑖿 +T; 🄀Ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß +N; 🄀Ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß +T; 0.Ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß +N; 0.Ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß +T; 0.ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß +N; 0.ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß +B; 0.Ⴄ\u0669\u0820。1.\u0FB6SS; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss +B; 0.ⴄ\u0669\u0820。1.\u0FB6ss; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶss +B; 0.Ⴄ\u0669\u0820。1.\u0FB6Ss; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss +B; 0.xn--iib29f26o.1.xn--ss-1sj; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss +B; 0.xn--iib29fp25e.1.xn--ss-1sj; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶss +B; 0.xn--iib29fp25e.1.xn--zca117e; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß +B; 0.xn--iib29f26o.1.xn--zca117e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß +T; 🄀ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß +N; 🄀ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß +B; 🄀Ⴄ\u0669\u0820。⒈\u0FB6SS; 
[B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss +B; 🄀ⴄ\u0669\u0820。⒈\u0FB6ss; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶss +B; 🄀Ⴄ\u0669\u0820。⒈\u0FB6Ss; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss +B; xn--iib29f26o6n43c.xn--ss-1sj588o; [B1 V6]; [B1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss +B; xn--iib29fp25e0219a.xn--ss-1sj588o; [B1 V6]; [B1 V6] # 🄀ⴄ٩ࠠ.⒈ྶss +B; xn--iib29fp25e0219a.xn--zca117e3vp; [B1 V6]; [B1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß +B; xn--iib29f26o6n43c.xn--zca117e3vp; [B1 V6]; [B1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß +T; ≠.\u200C-\u066B; [B1 C1 P1 V6]; [B1 P1 V3 V6] # ≠.-٫ +N; ≠.\u200C-\u066B; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.-٫ +T; =\u0338.\u200C-\u066B; [B1 C1 P1 V6]; [B1 P1 V3 V6] # ≠.-٫ +N; =\u0338.\u200C-\u066B; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.-٫ +B; xn--1ch.xn----vqc; [B1 V3 V6]; [B1 V3 V6] # ≠.-٫ +B; xn--1ch.xn----vqc597q; [B1 C1 V6]; [B1 C1 V6] # ≠.-٫ +B; \u0660۱。󠳶𞠁\u0665; [B1 P1 V6]; [B1 P1 V6] # ٠۱.𞠁٥ +B; \u0660۱。󠳶𞠁\u0665; [B1 P1 V6]; [B1 P1 V6] # ٠۱.𞠁٥ +B; xn--8hb40a.xn--eib7967vner3e; [B1 V6]; [B1 V6] # ٠۱.𞠁٥ +T; \u200C\u0663⒖。󱅉𽷛\u1BF3; [B1 C1 P1 V6]; [B1 P1 V6] # ٣⒖.᯳ +N; \u200C\u0663⒖。󱅉𽷛\u1BF3; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ٣⒖.᯳ +T; \u200C\u066315.。󱅉𽷛\u1BF3; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ٣15..᯳ +N; \u200C\u066315.。󱅉𽷛\u1BF3; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ٣15..᯳ +B; xn--15-gyd..xn--1zf13512buy41d; [B1 V6 A4_2]; [B1 V6 A4_2] # ٣15..᯳ +B; xn--15-gyd983x..xn--1zf13512buy41d; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ٣15..᯳ +B; xn--cib675m.xn--1zf13512buy41d; [B1 V6]; [B1 V6] # ٣⒖.᯳ +B; xn--cib152kwgd.xn--1zf13512buy41d; [B1 C1 V6]; [B1 C1 V6] # ٣⒖.᯳ +B; \u1BF3.-逋񳦭󙙮; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᯳.-逋 +B; xn--1zf.xn----483d46987byr50b; [V3 V5 V6]; [V3 V5 V6] # ᯳.-逋 +T; \u0756。\u3164\u200Dς; [C2 P1 V6]; [P1 V6] # ݖ.ς +N; \u0756。\u3164\u200Dς; [C2 P1 V6]; [C2 P1 V6] # ݖ.ς +T; \u0756。\u1160\u200Dς; [C2 P1 V6]; [P1 V6] # ݖ.ς +N; \u0756。\u1160\u200Dς; [C2 P1 V6]; [C2 P1 V6] # ݖ.ς +T; \u0756。\u1160\u200DΣ; [C2 P1 V6]; [P1 V6] # ݖ.σ +N; \u0756。\u1160\u200DΣ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ +T; \u0756。\u1160\u200Dσ; [C2 P1 V6]; [P1 V6] # ݖ.σ +N; \u0756。\u1160\u200Dσ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ +B; xn--9ob.xn--4xa380e; [V6]; [V6] # ݖ.σ +B; xn--9ob.xn--4xa380ebol; [C2 V6]; [C2 V6] # ݖ.σ +B; xn--9ob.xn--3xa580ebol; [C2 V6]; [C2 V6] # ݖ.ς +T; \u0756。\u3164\u200DΣ; [C2 P1 V6]; [P1 V6] # ݖ.σ +N; \u0756。\u3164\u200DΣ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ +T; \u0756。\u3164\u200Dσ; [C2 P1 V6]; [P1 V6] # ݖ.σ +N; \u0756。\u3164\u200Dσ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ +B; xn--9ob.xn--4xa574u; [V6]; [V6] # ݖ.σ +B; xn--9ob.xn--4xa795lq2l; [C2 V6]; [C2 V6] # ݖ.σ +B; xn--9ob.xn--3xa995lq2l; [C2 V6]; [C2 V6] # ݖ.ς +T; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆႣ.̕ +N; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆႣ.̕ +T; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆႣ.̕ +N; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆႣ.̕ +T; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆⴃ.̕ +N; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆⴃ.̕ +B; xn--57e237h.xn--5sa98523p; [V6]; [V6] # ᡆⴃ.̕ +B; xn--57e237h.xn--5sa649la993427a; [C2 V6]; [C2 V6] # ᡆⴃ.̕ +B; xn--bnd320b.xn--5sa98523p; [V6]; [V6] # ᡆႣ.̕ +B; xn--bnd320b.xn--5sa649la993427a; [C2 V6]; [C2 V6] # ᡆႣ.̕ +T; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆⴃ.̕ +N; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆⴃ.̕ +T; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.ς𐮮 +N; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 +T; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.ς𐮮 +N; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 +T; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 
C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +N; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +T; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +N; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +B; xn--ewb302xhu1l.xn--4xa0426k; [B5 B6]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +B; xn--ewb962jfitku4r.xn--4xa695lda6932v; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +B; xn--ewb962jfitku4r.xn--3xa895lda6932v; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 +T; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +N; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +T; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +N; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +B; \u17B5。𞯸ꡀ🄋; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # .ꡀ🄋 +B; xn--03e.xn--8b9ar252dngd; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # .ꡀ🄋 +B; 󐪺暑.⾑\u0668; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 暑.襾٨ +B; 󐪺暑.襾\u0668; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 暑.襾٨ +B; xn--tlvq3513e.xn--hib9228d; [B5 B6 V6]; [B5 B6 V6] # 暑.襾٨ +B; 󠄚≯ꡢ。\u0891\u1DFF; [B1 P1 V6]; [B1 P1 V6] # ≯ꡢ.᷿ +B; 󠄚>\u0338ꡢ。\u0891\u1DFF; [B1 P1 V6]; [B1 P1 V6] # ≯ꡢ.᷿ +B; xn--hdh7783c.xn--9xb680i; [B1 V6]; [B1 V6] # ≯ꡢ.᷿ +B; \uFDC3𮁱\u0B4D𐨿.󐧤Ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.Ⴗ +B; \u0643\u0645\u0645𮁱\u0B4D𐨿.󐧤Ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.Ⴗ +B; \u0643\u0645\u0645𮁱\u0B4D𐨿.󐧤ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.ⴗ +B; xn--fhbea662czx68a2tju.xn--fljz2846h; [B2 B3 V6]; [B2 B3 V6] # كمم𮁱୍𐨿.ⴗ +B; xn--fhbea662czx68a2tju.xn--vnd55511o; [B2 B3 V6]; [B2 B3 V6] # كمم𮁱୍𐨿.Ⴗ +B; \uFDC3𮁱\u0B4D𐨿.󐧤ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.ⴗ +B; 𞀨。\u1B44򡛨𞎇; [P1 V5 V6]; [P1 V5 V6] # 𞀨.᭄ +B; 𞀨。\u1B44򡛨𞎇; [P1 V5 V6]; [P1 V5 V6] # 𞀨.᭄ +B; xn--mi4h.xn--1uf6843smg20c; [V5 V6]; [V5 V6] # 𞀨.᭄ +T; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # .ᡟ +N; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .ᡟ +T; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # .ᡟ +N; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .ᡟ +B; xn--q046e.xn--v8e7227j; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; xn--0ug18531l.xn--v8e340bp21t; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .ᡟ +T; ᢛ󨅟ß.ጧ; [P1 V6]; [P1 V6] +N; ᢛ󨅟ß.ጧ; [P1 V6]; [P1 V6] +B; ᢛ󨅟SS.ጧ; [P1 V6]; [P1 V6] +B; ᢛ󨅟ss.ጧ; [P1 V6]; [P1 V6] +B; ᢛ󨅟Ss.ጧ; [P1 V6]; [P1 V6] +B; xn--ss-7dp66033t.xn--p5d; [V6]; [V6] +B; xn--zca562jc642x.xn--p5d; [V6]; [V6] +T; ⮒\u200C.񒚗\u200C; [C1 P1 V6]; [P1 V6] # ⮒. +N; ⮒\u200C.񒚗\u200C; [C1 P1 V6]; [C1 P1 V6] # ⮒. +B; xn--b9i.xn--5p9y; [V6]; [V6] +B; xn--0ugx66b.xn--0ugz2871c; [C1 V6]; [C1 V6] # ⮒. 
+B; 𞤂񹞁𐹯。Ⴜ; [B2 P1 V6]; [B2 P1 V6] +B; 𞤤񹞁𐹯。ⴜ; [B2 P1 V6]; [B2 P1 V6] +B; xn--no0dr648a51o3b.xn--klj; [B2 V6]; [B2 V6] +B; xn--no0dr648a51o3b.xn--0nd; [B2 V6]; [B2 V6] +B; 𞤂񹞁𐹯。ⴜ; [B2 P1 V6]; [B2 P1 V6] +T; 𐹵⮣\u200C𑄰。񷴿\uFCB7; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹵⮣𑄰.ضم +N; 𐹵⮣\u200C𑄰。񷴿\uFCB7; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹵⮣𑄰.ضم +T; 𐹵⮣\u200C𑄰。񷴿\u0636\u0645; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹵⮣𑄰.ضم +N; 𐹵⮣\u200C𑄰。񷴿\u0636\u0645; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹵⮣𑄰.ضم +B; xn--s9i5458e7yb.xn--1gb4a66004i; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 𐹵⮣𑄰.ضم +B; xn--0ug586bcj8p7jc.xn--1gb4a66004i; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹵⮣𑄰.ضم +T; Ⴒ。デß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ +N; Ⴒ。デß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ +T; Ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ +N; Ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ +T; ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +N; ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +T; ⴒ。デß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +N; ⴒ。デß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +B; Ⴒ。デSS𞤓\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; Ⴒ。テ\u3099SS𞤓\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; ⴒ。テ\u3099ss𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ +B; ⴒ。デss𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ +B; Ⴒ。デSs𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; Ⴒ。テ\u3099Ss𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; xn--qnd.xn--ss-9nh3648ahh20b; [B5 B6 V6]; [B5 B6 V6] # Ⴒ.デss𞤵్ +B; xn--9kj.xn--ss-9nh3648ahh20b; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ +B; xn--9kj.xn--zca669cmr3a0f28a; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +B; xn--qnd.xn--zca669cmr3a0f28a; [B5 B6 V6]; [B5 B6 V6] # Ⴒ.デß𞤵్ +B; Ⴒ。デSS𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; Ⴒ。テ\u3099SS𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; 𑁿\u0D4D.7-\u07D2; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ +B; 𑁿\u0D4D.7-\u07D2; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ +B; xn--wxc1283k.xn--7--yue; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ +B; ≯𑜫󠭇.\u1734񒞤𑍬ᢧ; [P1 V5 V6]; [P1 V5 V6] # ≯𑜫.᜴𑍬ᢧ +B; >\u0338𑜫󠭇.\u1734񒞤𑍬ᢧ; [P1 V5 V6]; [P1 V5 V6] # ≯𑜫.᜴𑍬ᢧ +B; xn--hdhx157g68o0g.xn--c0e65eu616c34o7a; [V5 V6]; [V5 V6] # ≯𑜫.᜴𑍬ᢧ +B; \u1DDB򎐙Ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛႷ쏔.ށ +B; \u1DDB򎐙Ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛႷ쏔.ށ +B; \u1DDB򎐙ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛⴗ쏔.ށ +B; \u1DDB򎐙ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛⴗ쏔.ށ +B; xn--zegy26dw47iy6w2f.xn--iqb; [B1 V5 V6]; [B1 V5 V6] # ᷛⴗ쏔.ށ +B; xn--vnd148d733ky6n9e.xn--iqb; [B1 V5 V6]; [B1 V5 V6] # ᷛႷ쏔.ށ +T; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ +N; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ +T; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ +N; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ +T; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +N; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +B; SS。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; ss。𐋳ⴌ\u0FB8; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ +B; Ss。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; ss.xn--lgd10cu829c; [V6]; [V6] # ss.𐋳Ⴌྸ +B; ss.xn--lgd921mvv0m; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ +B; ss.𐋳ⴌ\u0FB8; ; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ +B; SS.𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; Ss.𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; xn--zca.xn--lgd921mvv0m; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +T; ß.𐋳ⴌ\u0FB8; ; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +N; ß.𐋳ⴌ\u0FB8; ; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +B; xn--zca.xn--lgd10cu829c; [V6]; [V6] # ß.𐋳Ⴌྸ +T; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +N; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # 
ß.𐋳ⴌྸ +B; SS。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; ss。𐋳ⴌ\u0FB8; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ +B; Ss。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +T; -\u069E𐶡.\u200C⾝\u09CD; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ڞ.身্ +N; -\u069E𐶡.\u200C⾝\u09CD; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ڞ.身্ +T; -\u069E𐶡.\u200C身\u09CD; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ڞ.身্ +N; -\u069E𐶡.\u200C身\u09CD; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ڞ.身্ +B; xn----stc7013r.xn--b7b1419d; [B1 V3 V6]; [B1 V3 V6] # -ڞ.身্ +B; xn----stc7013r.xn--b7b305imj2f; [B1 C1 V3 V6]; [B1 C1 V3 V6] # -ڞ.身্ +T; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1] # 😮ݤ𑈵𞀖.💅 +N; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 +T; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1] # 😮ݤ𑈵𞀖.💅 +N; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 +B; xn--opb4277kuc7elqsa.xn--kr8h; [B1]; [B1] # 😮ݤ𑈵𞀖.💅 +B; xn--opb4277kuc7elqsa.xn--1ug5265p; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 +T; \u08F2\u200D꙳\u0712.ᢏ\u200C󠍄; [B1 B6 C1 C2 P1 V5 V6]; [B1 B6 P1 V5 V6] # ࣲ꙳ܒ.ᢏ +N; \u08F2\u200D꙳\u0712.ᢏ\u200C󠍄; [B1 B6 C1 C2 P1 V5 V6]; [B1 B6 C1 C2 P1 V5 V6] # ࣲ꙳ܒ.ᢏ +B; xn--cnb37gdy00a.xn--89e02253p; [B1 B6 V5 V6]; [B1 B6 V5 V6] # ࣲ꙳ܒ.ᢏ +B; xn--cnb37g904be26j.xn--89e849ax9363a; [B1 B6 C1 C2 V5 V6]; [B1 B6 C1 C2 V5 V6] # ࣲ꙳ܒ.ᢏ +B; Ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # Ⴑ.ڿᠲ +B; Ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # Ⴑ.ڿᠲ +B; ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⴑ.ڿᠲ +B; xn--8kj.xn--ykb840gd555a; [B2 B3 V6]; [B2 B3 V6] # ⴑ.ڿᠲ +B; xn--pnd.xn--ykb840gd555a; [B2 B3 V6]; [B2 B3 V6] # Ⴑ.ڿᠲ +B; ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⴑ.ڿᠲ +B; \u1A5A𛦝\u0C4D。𚝬𝟵; [P1 V5 V6]; [P1 V5 V6] # ᩚ్.9 +B; \u1A5A𛦝\u0C4D。𚝬9; [P1 V5 V6]; [P1 V5 V6] # ᩚ్.9 +B; xn--lqc703ebm93a.xn--9-000p; [V5 V6]; [V5 V6] # ᩚ్.9 +T; \u200C\u06A0𿺆𝟗。Ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.Ⴣ꒘مخ +N; \u200C\u06A0𿺆𝟗。Ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.Ⴣ꒘مخ +T; \u200C\u06A0𿺆9。Ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.Ⴣ꒘مخ +N; \u200C\u06A0𿺆9。Ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.Ⴣ꒘مخ +T; \u200C\u06A0𿺆9。ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.ⴣ꒘مخ +N; \u200C\u06A0𿺆9。ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.ⴣ꒘مخ +B; xn--9-vtc42319e.xn--tgb9bz87p833hw316c; [B2 B5 V6]; [B2 B5 V6] # ڠ9.ⴣ꒘مخ +B; xn--9-vtc736qts91g.xn--tgb9bz87p833hw316c; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ڠ9.ⴣ꒘مخ +B; xn--9-vtc42319e.xn--tgb9bz61cfn8mw3t2c; [B2 B5 V6]; [B2 B5 V6] # ڠ9.Ⴣ꒘مخ +B; xn--9-vtc736qts91g.xn--tgb9bz61cfn8mw3t2c; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ڠ9.Ⴣ꒘مخ +T; \u200C\u06A0𿺆𝟗。ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.ⴣ꒘مخ +N; \u200C\u06A0𿺆𝟗。ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.ⴣ꒘مخ +B; ᡖ。\u031F񗛨\u0B82-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᡖ.̟ஂ- +B; ᡖ。\u031F񗛨\u0B82-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᡖ.̟ஂ- +B; xn--m8e.xn----mdb555dkk71m; [V3 V5 V6]; [V3 V5 V6] # ᡖ.̟ஂ- +B; 𞠠浘。絧𞀀; [B2 B3]; [B2 B3] +B; xn--e0wp491f.xn--ud0a3573e; [B2 B3]; [B2 B3] +B; \u0596Ⴋ.𝟳≯︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯︒ +B; \u0596Ⴋ.𝟳>\u0338︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯︒ +B; \u0596Ⴋ.7≯。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯. +B; \u0596Ⴋ.7>\u0338。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯. +B; \u0596ⴋ.7>\u0338。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯. +B; \u0596ⴋ.7≯。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯. +B; xn--hcb613r.xn--7-pgo.; [V5 V6]; [V5 V6] # ֖ⴋ.7≯. +B; xn--hcb887c.xn--7-pgo.; [V5 V6]; [V5 V6] # ֖Ⴋ.7≯. 
+B; \u0596ⴋ.𝟳>\u0338︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯︒ +B; \u0596ⴋ.𝟳≯︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯︒ +B; xn--hcb613r.xn--7-pgoy530h; [V5 V6]; [V5 V6] # ֖ⴋ.7≯︒ +B; xn--hcb887c.xn--7-pgoy530h; [V5 V6]; [V5 V6] # ֖Ⴋ.7≯︒ +T; \u200DF𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂.︒ݾ𐹢 +N; \u200DF𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂.︒ݾ𐹢 +T; \u200DF𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂..ݾ𐹢 +N; \u200DF𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂..ݾ𐹢 +T; \u200Df𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂..ݾ𐹢 +N; \u200Df𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂..ݾ𐹢 +B; xn--f-kq9i.xn--7656e.xn--fqb4175k; [B1 V6]; [B1 V6] # f𑓂..ݾ𐹢 +B; xn--f-tgn9761i.xn--7656e.xn--fqb4175k; [B1 C2 V6]; [B1 C2 V6] # f𑓂..ݾ𐹢 +T; \u200Df𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂.︒ݾ𐹢 +N; \u200Df𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂.︒ݾ𐹢 +B; xn--f-kq9i.xn--fqb1637j8hky9452a; [B1 V6]; [B1 V6] # f𑓂.︒ݾ𐹢 +B; xn--f-tgn9761i.xn--fqb1637j8hky9452a; [B1 C2 V6]; [B1 C2 V6] # f𑓂.︒ݾ𐹢 +B; \u0845🄇𐼗︒。𐹻𑜫; [B1 B3 P1 V6]; [B1 B3 P1 V6] # ࡅ🄇︒.𐹻𑜫 +B; \u08456,𐼗。。𐹻𑜫; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡅ6,..𐹻𑜫 +B; xn--6,-r4e4420y..xn--zo0di2m; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡅ6,..𐹻𑜫 +B; xn--3vb4696jpxkjh7s.xn--zo0di2m; [B1 B3 V6]; [B1 B3 V6] # ࡅ🄇︒.𐹻𑜫 +B; 𐹈.\u1DC0𑈱𐦭; [B1 P1 V5 V6]; [B1 P1 V5 V6] # .᷀𑈱𐦭 +B; xn--jn0d.xn--7dg0871h3lf; [B1 V5 V6]; [B1 V5 V6] # .᷀𑈱𐦭 +B; Ⴂ䠺。𞤃񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # Ⴂ䠺.𞤥ړ +B; ⴂ䠺。𞤥񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # ⴂ䠺.𞤥ړ +B; xn--tkj638f.xn--pjb9818vg4xno967d; [B2 V6]; [B2 V6] # ⴂ䠺.𞤥ړ +B; xn--9md875z.xn--pjb9818vg4xno967d; [B2 V6]; [B2 V6] # Ⴂ䠺.𞤥ړ +B; ⴂ䠺。𞤃񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # ⴂ䠺.𞤥ړ +B; 🄇伐︒.𜙚\uA8C4; [P1 V6]; [P1 V6] # 🄇伐︒.꣄ +B; 6,伐。.𜙚\uA8C4; [P1 V6 A4_2]; [P1 V6 A4_2] # 6,伐..꣄ +B; xn--6,-7i3c..xn--0f9ao925c; [P1 V6 A4_2]; [P1 V6 A4_2] # 6,伐..꣄ +B; xn--woqs083bel0g.xn--0f9ao925c; [V6]; [V6] # 🄇伐︒.꣄ +T; \u200D𐹠\uABED\uFFFB。\u200D𐫓Ⴚ𑂹; [B1 C2 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠꯭.𐫓Ⴚ𑂹 +N; \u200D𐹠\uABED\uFFFB。\u200D𐫓Ⴚ𑂹; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹠꯭.𐫓Ⴚ𑂹 +T; \u200D𐹠\uABED\uFFFB。\u200D𐫓ⴚ𑂹; [B1 C2 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠꯭.𐫓ⴚ𑂹 +N; \u200D𐹠\uABED\uFFFB。\u200D𐫓ⴚ𑂹; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹠꯭.𐫓ⴚ𑂹 +B; xn--429az70n29i.xn--ilj7702eqyd; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠꯭.𐫓ⴚ𑂹 +B; xn--1ugz126coy7bdbm.xn--1ug062chv7ov6e; [B1 C2 V6]; [B1 C2 V6] # 𐹠꯭.𐫓ⴚ𑂹 +B; xn--429az70n29i.xn--ynd3619jqyd; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠꯭.𐫓Ⴚ𑂹 +B; xn--1ugz126coy7bdbm.xn--ynd959evs1pv6e; [B1 C2 V6]; [B1 C2 V6] # 𐹠꯭.𐫓Ⴚ𑂹 +B; 󠆠.񷐴󌟈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 󠆠.񷐴󌟈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; .xn--rx21bhv12i; [V6 A4_2]; [V6 A4_2] +T; 𐫃\u200CႦ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃Ⴆ.≠ +N; 𐫃\u200CႦ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃Ⴆ.≠ +T; 𐫃\u200CႦ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃Ⴆ.≠ +N; 𐫃\u200CႦ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃Ⴆ.≠ +T; 𐫃\u200Cⴆ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃ⴆ.≠ +N; 𐫃\u200Cⴆ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃ⴆ.≠ +T; 𐫃\u200Cⴆ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃ⴆ.≠ +N; 𐫃\u200Cⴆ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃ⴆ.≠ +B; xn--xkjz802e.xn--1ch2802p; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; xn--0ug132csv7o.xn--1ch2802p; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 𐫃ⴆ.≠ +B; xn--end1719j.xn--1ch2802p; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; xn--end799ekr1p.xn--1ch2802p; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 𐫃Ⴆ.≠ +B; 󠁲𙩢𝟥ꘌ.\u0841; [B1 P1 V6]; [B1 P1 V6] # 3ꘌ.ࡁ +B; 󠁲𙩢3ꘌ.\u0841; [B1 P1 V6]; [B1 P1 V6] # 3ꘌ.ࡁ +B; xn--3-0g3es485d8i15h.xn--zvb; [B1 V6]; [B1 V6] # 3ꘌ.ࡁ +B; -.\u1886󡲣-; 
[P1 V3 V5 V6]; [P1 V3 V5 V6] # -.ᢆ- +B; -.xn----pbkx6497q; [V3 V5 V6]; [V3 V5 V6] # -.ᢆ- +T; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .ς +N; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .ς +T; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .ς +N; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .ς +T; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ +N; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ +T; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ +N; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ +B; xn--qp42f.xn--4xa3011w; [B2 B3 V6]; [B2 B3 V6] +B; xn--0ug76062m.xn--4xa595lhn92a; [B1 B6 C1 V6]; [B1 B6 C1 V6] # .σ +B; xn--0ug76062m.xn--3xa795lhn92a; [B1 B6 C1 V6]; [B1 B6 C1 V6] # .ς +T; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ +N; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ +T; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ +N; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ +T; 堕𑓂\u1B02。𐮇𞤽\u200C-; [B3 C1 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- +N; 堕𑓂\u1B02。𐮇𞤽\u200C-; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- +T; 堕𑓂\u1B02。𐮇𞤛\u200C-; [B3 C1 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- +N; 堕𑓂\u1B02。𐮇𞤛\u200C-; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- +B; xn--5sf345zdk8h.xn----iv5iw606c; [B3 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- +B; xn--5sf345zdk8h.xn----rgnt157hwl9g; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- +T; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +N; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +T; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +N; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; xn--l8e1317j1ebz456b.xn--4xaa85plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +T; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +T; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +B; xn--l8e1317j1ebz456b.xn--3xab95plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +B; xn--l8e1317j1ebz456b.xn--3xaa16plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +T; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +T; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +T; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +T; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +T; 󏒰.-𝟻ß; [P1 V3 V6]; [P1 V3 V6] +N; 󏒰.-𝟻ß; [P1 V3 V6]; [P1 V3 V6] +T; 󏒰.-5ß; [P1 V3 V6]; [P1 V3 V6] +N; 󏒰.-5ß; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-5SS; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-5ss; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-5Ss; [P1 V3 V6]; [P1 V3 V6] +B; xn--t960e.-5ss; [V3 V6]; [V3 V6] +B; xn--t960e.xn---5-hia; [V3 V6]; [V3 V6] +B; 󏒰.-𝟻SS; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-𝟻ss; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-𝟻Ss; [P1 V3 V6]; [P1 V3 V6] +T; \u200D𐨿.🤒Ⴥ򑮶; [C2 P1 V6]; [P1 V5 V6] # 𐨿.🤒Ⴥ +N; \u200D𐨿.🤒Ⴥ򑮶; [C2 P1 V6]; [C2 P1 V6] # 𐨿.🤒Ⴥ +T; \u200D𐨿.🤒ⴥ򑮶; [C2 P1 V6]; [P1 V5 V6] # 𐨿.🤒ⴥ +N; \u200D𐨿.🤒ⴥ򑮶; [C2 P1 V6]; [C2 P1 V6] # 𐨿.🤒ⴥ +B; xn--0s9c.xn--tljz038l0gz4b; [V5 V6]; [V5 V6] +B; xn--1ug9533g.xn--tljz038l0gz4b; [C2 
V6]; [C2 V6] # 𐨿.🤒ⴥ +B; xn--0s9c.xn--9nd3211w0gz4b; [V5 V6]; [V5 V6] +B; xn--1ug9533g.xn--9nd3211w0gz4b; [C2 V6]; [C2 V6] # 𐨿.🤒Ⴥ +T; 𵋅。ß𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ß𬵩 +N; 𵋅。ß𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ß𬵩 +T; 𵋅。SS𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 +N; 𵋅。SS𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 +T; 𵋅。ss𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 +N; 𵋅。ss𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 +T; 𵋅。Ss𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 +N; 𵋅。Ss𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 +B; xn--ey1p.xn--ss-eq36b; [V6]; [V6] +B; xn--ey1p.xn--ss-n1tx0508a; [C2 V6]; [C2 V6] # .ss𬵩 +B; xn--ey1p.xn--zca870nz438b; [C2 V6]; [C2 V6] # .ß𬵩 +T; \u200C𭉝。\u07F1\u0301𞹻; [B1 C1 V5]; [B1 V5] # 𭉝.߱́غ +N; \u200C𭉝。\u07F1\u0301𞹻; [B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ +T; \u200C𭉝。\u07F1\u0301\u063A; [B1 C1 V5]; [B1 V5] # 𭉝.߱́غ +N; \u200C𭉝。\u07F1\u0301\u063A; [B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ +B; xn--634m.xn--lsa46nuub; [B1 V5]; [B1 V5] # 𭉝.߱́غ +B; xn--0ugy003y.xn--lsa46nuub; [B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ +T; 𞼌\u200C𑈶。𐹡; [B1 B3 C1 P1 V6]; [B1 P1 V6] # 𑈶.𐹡 +N; 𞼌\u200C𑈶。𐹡; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𑈶.𐹡 +B; xn--9g1d1288a.xn--8n0d; [B1 V6]; [B1 V6] +B; xn--0ug7946gzpxf.xn--8n0d; [B1 B3 C1 V6]; [B1 B3 C1 V6] # 𑈶.𐹡 +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +B; xn--zb9h5968x.xn--4xa378i1mfjw7y; [V5 V6]; [V5 V6] # 🜭.𑖿᪻σ≠ +B; xn--0ug3766p5nm1b.xn--4xa378i1mfjw7y; [C1 V5 V6]; [C1 V5 V6] # 🜭.𑖿᪻σ≠ +B; xn--0ug3766p5nm1b.xn--3xa578i1mfjw7y; [C1 V5 V6]; [C1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; ⒋。⒈\u200D򳴢; [C2 P1 V6]; [P1 V6] # ⒋.⒈ +N; ⒋。⒈\u200D򳴢; [C2 P1 V6]; [C2 P1 V6] # ⒋.⒈ +T; 4.。1.\u200D򳴢; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 4..1. +N; 4.。1.\u200D򳴢; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 4..1. +B; 4..1.xn--sf51d; [V6 A4_2]; [V6 A4_2] +B; 4..1.xn--1ug64613i; [C2 V6 A4_2]; [C2 V6 A4_2] # 4..1. 
+B; xn--wsh.xn--tsh07994h; [V6]; [V6] +B; xn--wsh.xn--1ug58o74922a; [C2 V6]; [C2 V6] # ⒋.⒈ +T; \u0644ß。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +N; \u0644ß。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +T; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +N; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +T; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +N; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +B; \u0644SS。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; xn--ss-svd.xn--jof2298hn83fln78f; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # لss.᩠𐇽𞤾 +B; xn--zca57y.xn--jof2298hn83fln78f; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # لß.᩠𐇽𞤾 +B; \u0644SS。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644SS。𐇽\u1A60򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644ss。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。𐇽\u1A60򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644SS。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644SS。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644SS。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; 𐹽𑄳񼜲.\u1DDF\u17B8\uA806𑜫; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𐹽𑄳.ᷟី꠆𑜫 +B; xn--1o0di0c0652w.xn--33e362arr1l153d; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𐹽𑄳.ᷟី꠆𑜫 +T; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 +N; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 +T; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 +N; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 +T; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 +N; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 +B; xn--blj6306ey091d.xn--9jb4223l; [V6]; [V6] # ⴓ𑜫.ڧ𑰶 +B; xn--1ugy52cym7p7xu5e.xn--9jb4223l; [V6]; [V6] # ⴓ𑜫.ڧ𑰶 +B; xn--rnd8945ky009c.xn--9jb4223l; [V6]; [V6] # Ⴓ𑜫.ڧ𑰶 +B; xn--rnd479ep20q7x12e.xn--9jb4223l; [V6]; [V6] # Ⴓ𑜫.ڧ𑰶 +T; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 +N; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 +B; 𐨿.🄆—; [P1 V5 V6]; [P1 V5 V6] +B; 𐨿.5,—; [P1 V5 V6]; [P1 V5 V6] +B; xn--0s9c.xn--5,-81t; [P1 V5 V6]; [P1 V5 V6] +B; xn--0s9c.xn--8ug8324p; [V5 V6]; [V5 V6] +B; 򔊱񁦮۸。󠾭-; [P1 V3 V6]; [P1 V3 V6] +B; xn--lmb18944c0g2z.xn----2k81m; [V3 V6]; [V3 V6] +B; 𼗸\u07CD𐹮。\u06DDᡎᠴ; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ߍ𐹮.ᡎᠴ +B; xn--osb0855kcc2r.xn--tlb299fhc; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ߍ𐹮.ᡎᠴ +T; \u200DᠮႾ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 P1 V6] # ᠮႾ🄂.🚗ࡁ +N; \u200DᠮႾ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ🄂.🚗ࡁ +T; \u200DᠮႾ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 B6 P1 V6] # ᠮႾ1,.🚗ࡁ +N; \u200DᠮႾ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ1,.🚗ࡁ +T; \u200Dᠮⴞ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 B6 P1 V6] # ᠮⴞ1,.🚗ࡁ +N; \u200Dᠮⴞ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮⴞ1,.🚗ࡁ +B; xn--1,-v3o625k.xn--zvb3124wpkpf; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᠮⴞ1,.🚗ࡁ +B; xn--1,-v3o161c53q.xn--zvb692j9664aic1g; [B1 C1 C2 P1 
V6]; [B1 C1 C2 P1 V6] # ᠮⴞ1,.🚗ࡁ +B; xn--1,-ogkx89c.xn--zvb3124wpkpf; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᠮႾ1,.🚗ࡁ +B; xn--1,-ogkx89c39j.xn--zvb692j9664aic1g; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ1,.🚗ࡁ +T; \u200Dᠮⴞ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 P1 V6] # ᠮⴞ🄂.🚗ࡁ +N; \u200Dᠮⴞ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮⴞ🄂.🚗ࡁ +B; xn--h7e438h1p44a.xn--zvb3124wpkpf; [B1 V6]; [B1 V6] # ᠮⴞ🄂.🚗ࡁ +B; xn--h7e341b0wlbv45b.xn--zvb692j9664aic1g; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ᠮⴞ🄂.🚗ࡁ +B; xn--2nd129ai554b.xn--zvb3124wpkpf; [B1 V6]; [B1 V6] # ᠮႾ🄂.🚗ࡁ +B; xn--2nd129ay2gnw71c.xn--zvb692j9664aic1g; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ᠮႾ🄂.🚗ࡁ +B; \u0601\u0697.𑚶񼡷⾆; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ڗ.𑚶舌 +B; \u0601\u0697.𑚶񼡷舌; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ڗ.𑚶舌 +B; xn--jfb41a.xn--tc1ap851axo39c; [B1 V5 V6]; [B1 V5 V6] # ڗ.𑚶舌 +B; 🞅󠳡󜍙.񲖷; [P1 V6]; [P1 V6] +B; xn--ie9hi1349bqdlb.xn--oj69a; [V6]; [V6] +T; \u20E7񯡎-򫣝.4Ⴄ\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # ⃧-.4Ⴄ +N; \u20E7񯡎-򫣝.4Ⴄ\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⃧-.4Ⴄ +T; \u20E7񯡎-򫣝.4ⴄ\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # ⃧-.4ⴄ +N; \u20E7񯡎-򫣝.4ⴄ\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⃧-.4ⴄ +B; xn----9snu5320fi76w.xn--4-ivs; [V5 V6]; [V5 V6] # ⃧-.4ⴄ +B; xn----9snu5320fi76w.xn--4-sgn589c; [C1 V5 V6]; [C1 V5 V6] # ⃧-.4ⴄ +B; xn----9snu5320fi76w.xn--4-f0g; [V5 V6]; [V5 V6] # ⃧-.4Ⴄ +B; xn----9snu5320fi76w.xn--4-f0g649i; [C1 V5 V6]; [C1 V5 V6] # ⃧-.4Ⴄ +T; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +N; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 +T; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +N; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 +B; ᚭ。𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ。𝌠ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ。𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; xn--hwe.xn--ss-ci1ub261a; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ.𝌠ss𖫱; ; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ.𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ.𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; xn--hwe.xn--zca4946pblnc; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 +T; ᚭ.𝌠ß𖫱; ; xn--hwe.xn--ss-ci1ub261a; NV8 +N; ᚭ.𝌠ß𖫱; ; xn--hwe.xn--zca4946pblnc; NV8 +B; ᚭ。𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ。𝌠ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ。𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ₁。𞤫ꡪ; [B1 B2 B3]; [B1 B2 B3] +B; 1。𞤫ꡪ; [B1 B2 B3]; [B1 B2 B3] +B; 1。𞤉ꡪ; [B1 B2 B3]; [B1 B2 B3] +B; 1.xn--gd9al691d; [B1 B2 B3]; [B1 B2 B3] +B; ₁。𞤉ꡪ; [B1 B2 B3]; [B1 B2 B3] +T; 𯻼\u200C.𞶞򻙤񥘇; [B2 B3 B6 C1 P1 V6]; [B2 B3 P1 V6] # . +N; 𯻼\u200C.𞶞򻙤񥘇; [B2 B3 B6 C1 P1 V6]; [B2 B3 B6 C1 P1 V6] # . +B; xn--kg4n.xn--2b7hs861pl540a; [B2 B3 V6]; [B2 B3 V6] +B; xn--0ug27500a.xn--2b7hs861pl540a; [B2 B3 B6 C1 V6]; [B2 B3 B6 C1 V6] # . 
+B; 𑑄≯。𑜤; [P1 V5 V6]; [P1 V5 V6] +B; 𑑄>\u0338。𑜤; [P1 V5 V6]; [P1 V5 V6] +B; 𑑄≯。𑜤; [P1 V5 V6]; [P1 V5 V6] +B; 𑑄>\u0338。𑜤; [P1 V5 V6]; [P1 V5 V6] +B; xn--hdh5636g.xn--ci2d; [V5 V6]; [V5 V6] +T; Ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # Ⴋ≮.ާ𐋣 +N; Ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # Ⴋ≮.ާ𐋣 +T; Ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # Ⴋ≮.ާ𐋣 +N; Ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # Ⴋ≮.ާ𐋣 +T; ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # ⴋ≮.ާ𐋣 +N; ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # ⴋ≮.ާ𐋣 +T; ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # ⴋ≮.ާ𐋣 +N; ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # ⴋ≮.ާ𐋣 +B; xn--gdhz03bxt42d.xn--lrb6479j; [V5 V6]; [V5 V6] # ⴋ≮.ާ𐋣 +B; xn--gdhz03bxt42d.xn--lrb506jqr4n; [C2 V6]; [C2 V6] # ⴋ≮.ާ𐋣 +B; xn--jnd802gsm17c.xn--lrb6479j; [V5 V6]; [V5 V6] # Ⴋ≮.ާ𐋣 +B; xn--jnd802gsm17c.xn--lrb506jqr4n; [C2 V6]; [C2 V6] # Ⴋ≮.ާ𐋣 +B; \u17D2.򆽒≯; [P1 V5 V6]; [P1 V5 V6] # ្.≯ +B; \u17D2.򆽒>\u0338; [P1 V5 V6]; [P1 V5 V6] # ្.≯ +B; xn--u4e.xn--hdhx0084f; [V5 V6]; [V5 V6] # ្.≯ +B; 񏁇\u1734.𐨺É⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺E\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺É⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺E\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺e\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺é⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; xn--c0e34564d.xn--9ca207st53lg3f; [V5 V6]; [V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺e\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺é⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +T; ᢇ\u200D\uA8C4。︒𞤺; [B1 B6 C2 P1 V6]; [B1 P1 V6] # ᢇ꣄.︒𞤺 +N; ᢇ\u200D\uA8C4。︒𞤺; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ᢇ꣄.︒𞤺 +T; ᢇ\u200D\uA8C4。。𞤺; [B6 C2 A4_2]; [A4_2] # ᢇ꣄..𞤺 +N; ᢇ\u200D\uA8C4。。𞤺; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 +T; ᢇ\u200D\uA8C4。。𞤘; [B6 C2 A4_2]; [A4_2] # ᢇ꣄..𞤺 +N; ᢇ\u200D\uA8C4。。𞤘; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 +B; xn--09e4694e..xn--ye6h; [A4_2]; [A4_2] # ᢇ꣄..𞤺 +B; xn--09e669a6x8j..xn--ye6h; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 +T; ᢇ\u200D\uA8C4。︒𞤘; [B1 B6 C2 P1 V6]; [B1 P1 V6] # ᢇ꣄.︒𞤺 +N; ᢇ\u200D\uA8C4。︒𞤘; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ᢇ꣄.︒𞤺 +B; xn--09e4694e.xn--y86cv562b; [B1 V6]; [B1 V6] # ᢇ꣄.︒𞤺 +B; xn--09e669a6x8j.xn--y86cv562b; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ᢇ꣄.︒𞤺 +T; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +N; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +T; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +N; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +T; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +N; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +T; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +N; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +B; xn--fze3930v7hz6b.xn--wgb86el10d; [B2 B3 V6]; [B2 B3 V6] # ᜔.رު≮ +B; xn--fze607b9651bjwl7c.xn--wgb86el10d; [B2 B3 V6]; [B2 B3 V6] # ᜔.رު≮ +B; Ⴣ.\u0653ᢤ; [P1 V5 V6]; [P1 V5 V6] # Ⴣ.ٓᢤ +B; Ⴣ.\u0653ᢤ; [P1 V5 V6]; [P1 V5 V6] # Ⴣ.ٓᢤ +B; ⴣ.\u0653ᢤ; [V5]; [V5] # ⴣ.ٓᢤ +B; xn--rlj.xn--vhb294g; [V5]; [V5] # ⴣ.ٓᢤ +B; xn--7nd.xn--vhb294g; [V5 V6]; [V5 V6] # Ⴣ.ٓᢤ +B; ⴣ.\u0653ᢤ; [V5]; [V5] # ⴣ.ٓᢤ +B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ +B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ +B; xn--oub.xn--sljz109bpe25dviva; [V6]; [V6] # ࠓ.싉ⴤ +B; xn--oub.xn--8nd9522gpe69cviva; 
[V6]; [V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ +B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ +B; \uAA2C𑲫≮.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ +B; \uAA2C𑲫<\u0338.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ +B; \uAA2C𑲫≮.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ +B; \uAA2C𑲫<\u0338.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ +B; xn--gdh1854cn19c.xn--kqi; [V5 V6]; [V5 V6] # ꨬ𑲫≮.⤂ +B; \u0604𐩔≮Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔<\u0338Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔≮Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔<\u0338Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔<\u0338ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ +B; \u0604𐩔≮ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ +B; \u0604𐩔≮Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ +B; \u0604𐩔<\u0338Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ +B; xn--mfb416c0jox02t.xn--ukj; [B1 V6]; [B1 V6] # 𐩔≮Ⴢ.ⴃ +B; xn--mfb266l4khr54u.xn--ukj; [B1 V6]; [B1 V6] # 𐩔≮ⴢ.ⴃ +B; xn--mfb416c0jox02t.xn--bnd; [B1 V6]; [B1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔<\u0338ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ +B; \u0604𐩔≮ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ +B; \u0604𐩔≮Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ +B; \u0604𐩔<\u0338Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ +B; 𑁅。-; [V3 V5]; [V3 V5] +B; xn--210d.-; [V3 V5]; [V3 V5] +B; \u0DCA򕸽󠧱。饈≠\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ +B; \u0DCA򕸽󠧱。饈=\u0338\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ +B; \u0DCA򕸽󠧱。饈≠\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ +B; \u0DCA򕸽󠧱。饈=\u0338\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ +B; xn--h1c25913jfwov.xn--dib144ler5f; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ්.饈≠٤ +B; 𞥃ᠠ⁷。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞥃ᠠ⁷。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞥃ᠠ7。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞥃ᠠ7。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞤡ᠠ7。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞤡ᠠ7。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; xn--7-v4j2826w.xn--4-ogoy01bou3i; [B1 B2 V6]; [B1 B2 V6] +B; 𞤡ᠠ⁷。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞤡ᠠ⁷。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 򠿯ᡳ-𑐻.𐹴𐋫\u0605󑎳; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡳ-𑐻.𐹴𐋫 +B; xn----m9j3429kxmy7e.xn--nfb7950kdihrp812a; [B1 B6 V6]; [B1 B6 V6] # ᡳ-𑐻.𐹴𐋫 +B; 򠶆\u0845\u0A51.넨-󶧈; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡅੑ.넨- +B; 򠶆\u0845\u0A51.넨-󶧈; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡅੑ.넨- +B; xn--3vb26hb6834b.xn----i37ez0957g; [B5 B6 V6]; [B5 B6 V6] # ࡅੑ.넨- +T; ꡦᡑ\u200D⒈。𐋣-; [C2 P1 V3 V6]; [P1 V3 V6] # ꡦᡑ⒈.𐋣- +N; ꡦᡑ\u200D⒈。𐋣-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ꡦᡑ⒈.𐋣- +T; ꡦᡑ\u200D1.。𐋣-; [C2 V3 A4_2]; [V3 A4_2] # ꡦᡑ1..𐋣- +N; ꡦᡑ\u200D1.。𐋣-; [C2 V3 A4_2]; [C2 V3 A4_2] # ꡦᡑ1..𐋣- +B; xn--1-o7j0610f..xn----381i; [V3 A4_2]; [V3 A4_2] +B; xn--1-o7j663bdl7m..xn----381i; [C2 V3 A4_2]; [C2 V3 A4_2] # ꡦᡑ1..𐋣- +B; xn--h8e863drj7h.xn----381i; [V3 V6]; [V3 V6] +B; xn--h8e470bl0d838o.xn----381i; [C2 V3 V6]; [C2 V3 V6] # ꡦᡑ⒈.𐋣- +B; Ⴌ。􍼠\uFB69; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴌ.ٹ +B; Ⴌ。􍼠\u0679; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴌ.ٹ +B; ⴌ。􍼠\u0679; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴌ.ٹ +B; xn--3kj.xn--yib19191t; [B5 B6 V6]; [B5 B6 V6] # ⴌ.ٹ +B; xn--knd.xn--yib19191t; [B5 B6 V6]; [B5 B6 V6] # Ⴌ.ٹ +B; ⴌ。􍼠\uFB69; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴌ.ٹ +B; 𐮁𐭱.\u0F84\u135E-\u1CFA; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐮁𐭱.྄፞- +B; xn--r19c5a.xn----xjg270ag3m; [B1 V5 V6]; [B1 V5 V6] # 𐮁𐭱.྄፞- +T; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [P1 V3 V6] # ⒈䰹-.웈 +N; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ⒈䰹-.웈 +T; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [P1 V3 V6] # ⒈䰹-.웈 +N; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ⒈䰹-.웈 +T; 1.䰹\u200D-。웈; [C2 V3]; [V3] # 1.䰹-.웈 +N; 1.䰹\u200D-。웈; [C2 V3]; [C2 V3] # 1.䰹-.웈 +T; 1.䰹\u200D-。웈; 
[C2 V3]; [V3] # 1.䰹-.웈 +N; 1.䰹\u200D-。웈; [C2 V3]; [C2 V3] # 1.䰹-.웈 +B; 1.xn----zw5a.xn--kp5b; [V3]; [V3] +B; 1.xn----tgnz80r.xn--kp5b; [C2 V3]; [C2 V3] # 1.䰹-.웈 +B; xn----dcp160o.xn--kp5b; [V3 V6]; [V3 V6] +B; xn----tgnx5rjr6c.xn--kp5b; [C2 V3 V6]; [C2 V3 V6] # ⒈䰹-.웈 +T; て。\u200C󠳽\u07F3; [C1 P1 V6]; [P1 V6] # て.߳ +N; て。\u200C󠳽\u07F3; [C1 P1 V6]; [C1 P1 V6] # て.߳ +B; xn--m9j.xn--rtb10784p; [V6]; [V6] # て.߳ +B; xn--m9j.xn--rtb154j9l73w; [C1 V6]; [C1 V6] # て.߳ +T; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ +N; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ +T; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ +N; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ +B; Σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ +B; σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ +B; xn--4xa.xn--3lb1944f; [V5]; [V5] # σ.꧀ۧ +B; xn--3xa.xn--3lb1944f; [V5]; [V5] # ς.꧀ۧ +B; Σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ +B; σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ +B; \u0BCD󥫅򌉑.ႢႵ; [P1 V5 V6]; [P1 V5 V6] # ்.ႢႵ +B; \u0BCD󥫅򌉑.ⴂⴕ; [P1 V5 V6]; [P1 V5 V6] # ்.ⴂⴕ +B; \u0BCD󥫅򌉑.Ⴂⴕ; [P1 V5 V6]; [P1 V5 V6] # ்.Ⴂⴕ +B; xn--xmc83135idcxza.xn--9md086l; [V5 V6]; [V5 V6] # ்.Ⴂⴕ +B; xn--xmc83135idcxza.xn--tkjwb; [V5 V6]; [V5 V6] # ்.ⴂⴕ +B; xn--xmc83135idcxza.xn--9md2b; [V5 V6]; [V5 V6] # ்.ႢႵ +T; \u1C32🄈⾛\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ🄈走֦. +N; \u1C32🄈⾛\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ🄈走֦. +T; \u1C327,走\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ7,走֦. +N; \u1C327,走\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ7,走֦. +B; xn--7,-bid991urn3k.xn--1tb13454l; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ7,走֦. +B; xn--7,-bid991urn3k.xn--1tb334j1197q; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ7,走֦. +B; xn--xcb756i493fwi5o.xn--1tb13454l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ᰲ🄈走֦. +B; xn--xcb756i493fwi5o.xn--1tb334j1197q; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᰲ🄈走֦. 
+B; ᢗ。Ӏ񝄻; [P1 V6]; [P1 V6] +B; ᢗ。Ӏ񝄻; [P1 V6]; [P1 V6] +B; ᢗ。ӏ񝄻; [P1 V6]; [P1 V6] +B; xn--hbf.xn--s5a83117e; [V6]; [V6] +B; xn--hbf.xn--d5a86117e; [V6]; [V6] +B; ᢗ。ӏ񝄻; [P1 V6]; [P1 V6] +B; \u0668-。񠏇🝆ᄾ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٨-.🝆ᄾ +B; xn----oqc.xn--qrd1699v327w; [B1 V3 V6]; [B1 V3 V6] # ٨-.🝆ᄾ +B; -𐋷𖾑。󠆬; [V3]; [V3] +B; xn----991iq40y.; [V3]; [V3] +T; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹳🐴멈.꯭ +N; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𐹳🐴멈.꯭ +T; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹳🐴멈.꯭ +N; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𐹳🐴멈.꯭ +B; xn--422b325mqb6i.xn--429a8682s; [B1 V5 V6]; [B1 V5 V6] # 𐹳🐴멈.꯭ +B; xn--0ug6681d406b7bwk.xn--429a8682s; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𐹳🐴멈.꯭ +B; ≮.\u0769\u0603; [B1 P1 V6]; [B1 P1 V6] # ≮.ݩ +B; <\u0338.\u0769\u0603; [B1 P1 V6]; [B1 P1 V6] # ≮.ݩ +B; xn--gdh.xn--lfb92e; [B1 V6]; [B1 V6] # ≮.ݩ +T; 𐶭⾆。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B2 B3 B5 B6 P1 V5 V6] # 舌.𑚶 +N; 𐶭⾆。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 舌.𑚶 +T; 𐶭舌。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B2 B3 B5 B6 P1 V5 V6] # 舌.𑚶 +N; 𐶭舌。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 舌.𑚶 +B; xn--tc1ao37z.xn--6e2dw557azds2d; [B2 B3 B5 B6 V5 V6]; [B2 B3 B5 B6 V5 V6] +B; xn--tc1ao37z.xn--0ugx728gi1nfwqz2e; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 舌.𑚶 +T; \u200CჀ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1ςς +N; \u200CჀ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1ςς +T; \u200CჀ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1ςς +N; \u200CჀ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1ςς +T; \u200Cⴠ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1ςς +N; \u200Cⴠ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1ςς +T; \u200CჀ-.1Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1σσ +N; \u200CჀ-.1Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1σσ +T; \u200Cⴠ-.1σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1σσ +N; \u200Cⴠ-.1σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1σσ +B; xn----2ws.xn--1-0mba52321c; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn----rgn530d.xn--1-0mba52321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ⴠ-.1σσ +B; xn----z1g.xn--1-0mba52321c; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn----z1g168i.xn--1-0mba52321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴠ-.1σσ +B; xn----rgn530d.xn--1-ymba92321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ⴠ-.1ςς +B; xn----z1g168i.xn--1-ymba92321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴠ-.1ςς +T; \u200Cⴠ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1ςς +N; \u200Cⴠ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1ςς +T; \u200CჀ-.𝟷Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1σσ +N; \u200CჀ-.𝟷Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1σσ +T; \u200Cⴠ-.𝟷σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1σσ +N; \u200Cⴠ-.𝟷σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1σσ +B; 𑲘󠄒𓑡。𝟪Ⴜ; [P1 V5 V6]; [P1 V5 V6] +B; 𑲘󠄒𓑡。8Ⴜ; [P1 V5 V6]; [P1 V5 V6] +B; 𑲘󠄒𓑡。8ⴜ; [P1 V5 V6]; [P1 V5 V6] +B; xn--7m3d291b.xn--8-vws; [V5 V6]; [V5 V6] +B; xn--7m3d291b.xn--8-s1g; [V5 V6]; [V5 V6] +B; 𑲘󠄒𓑡。𝟪ⴜ; [P1 V5 V6]; [P1 V5 V6] +B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; xn--ekb23dj4at01n.xn--43e96bh910b; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; \u1BAB。🂉󠁰; [P1 V5 V6]; [P1 V5 V6] # ᮫.🂉 +B; \u1BAB。🂉󠁰; [P1 V5 V6]; [P1 V5 V6] # ᮫.🂉 +B; xn--zxf.xn--fx7ho0250c; [V5 V6]; [V5 V6] # ᮫.🂉 +T; 󩎃\u0AC4。ς\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.ς𐹮𑈵 +N; 󩎃\u0AC4。ς\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # 
ૄ.ς𐹮𑈵 +T; 󩎃\u0AC4。Σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.σ𐹮𑈵 +N; 󩎃\u0AC4。Σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # ૄ.σ𐹮𑈵 +T; 󩎃\u0AC4。σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.σ𐹮𑈵 +N; 󩎃\u0AC4。σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # ૄ.σ𐹮𑈵 +B; xn--dfc53161q.xn--4xa8467k5mc; [B5 V6]; [B5 V6] # ૄ.σ𐹮𑈵 +B; xn--dfc53161q.xn--4xa895lzo7nsfd; [B5 C2 V6]; [B5 C2 V6] # ૄ.σ𐹮𑈵 +B; xn--dfc53161q.xn--3xa006lzo7nsfd; [B5 C2 V6]; [B5 C2 V6] # ૄ.ς𐹮𑈵 +B; 𐫀ᡂ𑜫.𑘿; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] +B; 𐫀ᡂ𑜫.𑘿; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] +B; xn--17e9625js1h.xn--sb2d; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] +T; 󬚶󸋖򖩰-。\u200C; [C1 P1 V3 V6]; [P1 V3 V6] # -. +N; 󬚶󸋖򖩰-。\u200C; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -. +B; xn----7i12hu122k9ire.; [V3 V6]; [V3 V6] +B; xn----7i12hu122k9ire.xn--0ug; [C1 V3 V6]; [C1 V3 V6] # -. +B; 𐹣.\u07C2; [B1]; [B1] # 𐹣.߂ +B; 𐹣.\u07C2; [B1]; [B1] # 𐹣.߂ +B; xn--bo0d.xn--dsb; [B1]; [B1] # 𐹣.߂ +B; -\u07E1。Ↄ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ߡ.Ↄ +B; -\u07E1。Ↄ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ߡ.Ↄ +B; -\u07E1。ↄ; [B1 V3]; [B1 V3] # -ߡ.ↄ +B; xn----8cd.xn--r5g; [B1 V3]; [B1 V3] # -ߡ.ↄ +B; xn----8cd.xn--q5g; [B1 V3 V6]; [B1 V3 V6] # -ߡ.Ↄ +B; -\u07E1。ↄ; [B1 V3]; [B1 V3] # -ߡ.ↄ +T; \u200D-︒󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ß哑 +N; \u200D-︒󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ß哑 +T; \u200D-。󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ß哑 +N; \u200D-。󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ß哑 +T; \u200D-。󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 +N; \u200D-。󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 +T; \u200D-。󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 +N; \u200D-。󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 +T; \u200D-。󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 +N; \u200D-。󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 +B; -..xn--ss-h46c5711e; [B1 B5 B6 V3 V6 A4_2]; [B1 B5 B6 V3 V6 A4_2] +B; xn----tgn..xn--ss-k1ts75zb8ym; [B1 B5 B6 C1 C2 V3 V6 A4_2]; [B1 B5 B6 C1 C2 V3 V6 A4_2] # -..ss哑 +B; xn----tgn..xn--zca670n5f0binyk; [B1 B5 B6 C1 C2 V3 V6 A4_2]; [B1 B5 B6 C1 C2 V3 V6 A4_2] # -..ß哑 +T; \u200D-︒󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 +N; \u200D-︒󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 +T; \u200D-︒󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 +N; \u200D-︒󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 +T; \u200D-︒󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 +N; \u200D-︒󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 +B; xn----o89h.xn--ss-h46c5711e; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----tgnt341h.xn--ss-k1ts75zb8ym; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # -︒.ss哑 +B; xn----tgnt341h.xn--zca670n5f0binyk; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # -︒.ß哑 +B; ︒.\uFE2F𑑂; [P1 V5 V6]; [P1 V5 V6] # ︒.𑑂︯ +B; ︒.𑑂\uFE2F; [P1 V5 V6]; [P1 V5 V6] # ︒.𑑂︯ +B; 。.𑑂\uFE2F; [V5 A4_2]; [V5 A4_2] # ..𑑂︯ +B; ..xn--s96cu30b; [V5 A4_2]; [V5 A4_2] # ..𑑂︯ +B; xn--y86c.xn--s96cu30b; [V5 V6]; [V5 V6] # ︒.𑑂︯ +T; \uA92C。\u200D; [C2 V5]; [V5] # ꤬. +N; \uA92C。\u200D; [C2 V5]; [C2 V5] # ꤬. +B; xn--zi9a.; [V5]; [V5] # ꤬. +B; xn--zi9a.xn--1ug; [C2 V5]; [C2 V5] # ꤬. 
+T; \u200D󠸡。\uFCD7; [B1 C2 P1 V6]; [B1 P1 V6] # .هج +N; \u200D󠸡。\uFCD7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .هج +T; \u200D󠸡。\u0647\u062C; [B1 C2 P1 V6]; [B1 P1 V6] # .هج +N; \u200D󠸡。\u0647\u062C; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .هج +B; xn--d356e.xn--rgb7c; [B1 V6]; [B1 V6] # .هج +B; xn--1ug80651l.xn--rgb7c; [B1 C2 V6]; [B1 C2 V6] # .هج +T; -Ⴄ𝟢\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +N; -Ⴄ𝟢\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +T; -Ⴄ0\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +N; -Ⴄ0\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +T; -ⴄ0\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +N; -ⴄ0\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +B; -Ⴄ0\u0663.𑍴Σ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴σ +B; -ⴄ0\u0663.𑍴σ; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ +B; xn---0-iyd8660b.xn--4xa9120l; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ +B; xn---0-iyd216h.xn--4xa9120l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -Ⴄ0٣.𑍴σ +B; xn---0-iyd8660b.xn--3xa1220l; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +B; xn---0-iyd216h.xn--3xa1220l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +T; -ⴄ𝟢\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +N; -ⴄ𝟢\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +B; -Ⴄ𝟢\u0663.𑍴Σ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴σ +B; -ⴄ𝟢\u0663.𑍴σ; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ +B; 󦈄。-; [P1 V3 V6]; [P1 V3 V6] +B; xn--xm38e.-; [V3 V6]; [V3 V6] +T; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +N; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +T; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +N; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +T; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +N; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +T; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +N; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +B; ≼\u0338𐋮.򶈮\u0F18SS>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18SS≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ≼\u0338𐋮.򶈮\u0F18ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ≼\u0338𐋮.򶈮\u0F18Ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18Ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; xn--pgh4639f.xn--ss-ifj426nle504a; [V6]; [V6] # ⋠𐋮.༘ss≯ +B; xn--pgh4639f.xn--zca593eo6oc013y; [V6]; [V6] # ⋠𐋮.༘ß≯ +B; ≼\u0338𐋮.򶈮\u0F18SS>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18SS≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ≼\u0338𐋮.򶈮\u0F18ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ≼\u0338𐋮.򶈮\u0F18Ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18Ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; 1𐋸\u0664。󠢮\uFBA4񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ +B; 1𐋸\u0664。󠢮\u06C0񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ +B; 1𐋸\u0664。󠢮\u06D5\u0654񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ +B; xn--1-hqc3905q.xn--zkb83268gqee4a; [B1 V6]; [B1 V6] # 1𐋸٤.ۀ +T; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴Ⴢ +N; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴Ⴢ +T; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴Ⴢ +N; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴Ⴢ +T; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴ⴢ +N; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴ⴢ +B; xn----gz7a.xn--qlj9223eywx0b; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn----gz7a.xn--0ug472cfq0pus98b; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # 儭-.𐹴ⴢ +B; xn----gz7a.xn--6nd5001kyw98a; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn----gz7a.xn--6nd249ejl4pusr7b; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # 儭-.𐹴Ⴢ +T; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴ⴢ +N; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴ⴢ +B; 
𝟺𐋷\u06B9.𞤭򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 +B; 4𐋷\u06B9.𞤭򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 +B; 4𐋷\u06B9.𞤋򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 +B; xn--4-cvc5384q.xn--le6hi7322b; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 4𐋷ڹ.𞤭 +B; 𝟺𐋷\u06B9.𞤋򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 +B; ≯-ꡋ𑲣.⒈𐹭; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338-ꡋ𑲣.⒈𐹭; [B1 P1 V6]; [B1 P1 V6] +B; ≯-ꡋ𑲣.1.𐹭; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338-ꡋ𑲣.1.𐹭; [B1 P1 V6]; [B1 P1 V6] +B; xn----ogox061d5i8d.1.xn--lo0d; [B1 V6]; [B1 V6] +B; xn----ogox061d5i8d.xn--tsh0666f; [B1 V6]; [B1 V6] +B; \u0330.󰜱蚀; [P1 V5 V6]; [P1 V5 V6] # ̰.蚀 +B; \u0330.󰜱蚀; [P1 V5 V6]; [P1 V5 V6] # ̰.蚀 +B; xn--xta.xn--e91aw9417e; [V5 V6]; [V5 V6] # ̰.蚀 +T; \uFB39Ⴘ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ +N; \uFB39Ⴘ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ +T; \u05D9\u05BCႸ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ +N; \u05D9\u05BCႸ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ +T; \u05D9\u05BCⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +N; \u05D9\u05BCⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +B; \u05D9\u05BCႸ.𞡼𑇀SS\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ +B; \u05D9\u05BCⴘ.𞡼𑇀ss\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ +B; \u05D9\u05BCႸ.𞡼𑇀ss\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ +B; xn--kdb1d867b.xn--ss-yju5690ken9h; [B2 B3 V6]; [B2 B3 V6] # יּႸ.𞡼𑇀ss⃗ +B; xn--kdb1d278n.xn--ss-yju5690ken9h; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ +B; xn--kdb1d278n.xn--zca284nhg9nrrxg; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +B; xn--kdb1d867b.xn--zca284nhg9nrrxg; [B2 B3 V6]; [B2 B3 V6] # יּႸ.𞡼𑇀ß⃗ +T; \uFB39ⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +N; \uFB39ⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +B; \uFB39Ⴘ.𞡼𑇀SS\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ +B; \uFB39ⴘ.𞡼𑇀ss\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ +B; \uFB39Ⴘ.𞡼𑇀ss\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ +B; \u1BA3𐹰򁱓。凬; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᮣ𐹰.凬 +B; \u1BA3𐹰򁱓。凬; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᮣ𐹰.凬 +B; xn--rxfz314ilg20c.xn--t9q; [B1 V5 V6]; [B1 V5 V6] # ᮣ𐹰.凬 +T; 🢟🄈\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [P1 V5 V6] # 🢟🄈ꡎ.྄ +N; 🢟🄈\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟🄈ꡎ.྄ +T; 🢟7,\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [P1 V5 V6] # 🢟7,ꡎ.྄ +N; 🢟7,\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟7,ꡎ.྄ +B; xn--7,-gh9hg322i.xn--3ed; [P1 V5 V6]; [P1 V5 V6] # 🢟7,ꡎ.྄ +B; xn--7,-n1t0654eqo3o.xn--3ed; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟7,ꡎ.྄ +B; xn--nc9aq743ds0e.xn--3ed; [V5 V6]; [V5 V6] # 🢟🄈ꡎ.྄ +B; xn--1ug4874cfd0kbmg.xn--3ed; [C2 V5 V6]; [C2 V5 V6] # 🢟🄈ꡎ.྄ +B; ꡔ。\u1039ᢇ; [V5]; [V5] # ꡔ.္ᢇ +B; xn--tc9a.xn--9jd663b; [V5]; [V5] # ꡔ.္ᢇ +B; \u20EB≮.𝨖; [P1 V5 V6]; [P1 V5 V6] # ⃫≮.𝨖 +B; \u20EB<\u0338.𝨖; [P1 V5 V6]; [P1 V5 V6] # ⃫≮.𝨖 +B; xn--e1g71d.xn--772h; [V5 V6]; [V5 V6] # ⃫≮.𝨖 +B; Ⴢ≯褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; Ⴢ>\u0338褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; Ⴢ≯褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; Ⴢ>\u0338褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; ⴢ>\u0338褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ +B; ⴢ≯褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ +B; xn--hdh433bev8e.xn--rpb5x392bcyt; [B5 B6 V6]; [B5 B6 V6] # ⴢ≯褦.ᠪߪⴞݧ +B; xn--6nd461g478e.xn--rpb5x49td2h; [B5 B6 V6]; [B5 B6 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; ⴢ>\u0338褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ +B; ⴢ≯褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ +T; 򊉆󠆒\u200C\uA953。𞤙\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ꥓.𞤻ٻꡘ +N; 򊉆󠆒\u200C\uA953。𞤙\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ꥓.𞤻ٻꡘ +T; 
򊉆󠆒\u200C\uA953。𞤻\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ꥓.𞤻ٻꡘ +N; 򊉆󠆒\u200C\uA953。𞤻\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ꥓.𞤻ٻꡘ +B; xn--3j9al6189a.xn--0ib8893fegvj; [B2 B3 V6]; [B2 B3 V6] # ꥓.𞤻ٻꡘ +B; xn--0ug8815chtz0e.xn--0ib8893fegvj; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ꥓.𞤻ٻꡘ +T; \u200C.≯; [C1 P1 V6]; [P1 V6 A4_2] # .≯ +N; \u200C.≯; [C1 P1 V6]; [C1 P1 V6] # .≯ +T; \u200C.>\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≯ +N; \u200C.>\u0338; [C1 P1 V6]; [C1 P1 V6] # .≯ +B; .xn--hdh; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--hdh; [C1 V6]; [C1 V6] # .≯ +B; 𰅧񣩠-.\uABED-悜; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.꯭-悜 +B; 𰅧񣩠-.\uABED-悜; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.꯭-悜 +B; xn----7m53aj640l.xn----8f4br83t; [V3 V5 V6]; [V3 V5 V6] # -.꯭-悜 +T; ᡉ𶓧⬞ᢜ.-\u200D𞣑\u202E; [C2 P1 V3 V6]; [P1 V3 V6] # ᡉ⬞ᢜ.-𞣑 +N; ᡉ𶓧⬞ᢜ.-\u200D𞣑\u202E; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ᡉ⬞ᢜ.-𞣑 +B; xn--87e0ol04cdl39e.xn----qinu247r; [V3 V6]; [V3 V6] # ᡉ⬞ᢜ.-𞣑 +B; xn--87e0ol04cdl39e.xn----ugn5e3763s; [C2 V3 V6]; [C2 V3 V6] # ᡉ⬞ᢜ.-𞣑 +T; ⒐\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃Ⴝ.ڂႴ +N; ⒐\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃Ⴝ.ڂႴ +T; 9.\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 9.衃Ⴝ.ڂႴ +N; 9.\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 9.衃Ⴝ.ڂႴ +T; 9.\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1]; [B1 B2 B3] # 9.衃ⴝ.ڂⴔ +N; 9.\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1]; [B1 B2 B3 C1] # 9.衃ⴝ.ڂⴔ +T; 9.\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 9.衃Ⴝ.ڂⴔ +N; 9.\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 9.衃Ⴝ.ڂⴔ +B; 9.xn--1nd9032d.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 9.衃Ⴝ.ڂⴔ +B; 9.xn--1nd159e1y2f.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 9.衃Ⴝ.ڂⴔ +B; 9.xn--llj1920a.xn--7ib268q; [B1 B2 B3]; [B1 B2 B3] # 9.衃ⴝ.ڂⴔ +B; 9.xn--0ug862cbm5e.xn--7ib268q; [B1 B2 B3 C1]; [B1 B2 B3 C1] # 9.衃ⴝ.ڂⴔ +B; 9.xn--1nd9032d.xn--7ib433c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 9.衃Ⴝ.ڂႴ +B; 9.xn--1nd159e1y2f.xn--7ib433c; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 9.衃Ⴝ.ڂႴ +T; ⒐\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃ⴝ.ڂⴔ +N; ⒐\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃ⴝ.ڂⴔ +T; ⒐\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃Ⴝ.ڂⴔ +N; ⒐\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃Ⴝ.ڂⴔ +B; xn--1nd362hy16e.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃Ⴝ.ڂⴔ +B; xn--1nd159ecmd785k.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # ⒐衃Ⴝ.ڂⴔ +B; xn--1shy52abz3f.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃ⴝ.ڂⴔ +B; xn--0ugx0px1izu2h.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # ⒐衃ⴝ.ڂⴔ +B; xn--1nd362hy16e.xn--7ib433c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃Ⴝ.ڂႴ +B; xn--1nd159ecmd785k.xn--7ib433c; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # ⒐衃Ⴝ.ڂႴ +T; \u07E1\u200C。--⸬; [B1 B3 C1 V3]; [B1 V3] # ߡ.--⸬ +N; \u07E1\u200C。--⸬; [B1 B3 C1 V3]; [B1 B3 C1 V3] # ߡ.--⸬ +B; xn--8sb.xn-----iw2a; [B1 V3]; [B1 V3] # ߡ.--⸬ +B; xn--8sb884j.xn-----iw2a; [B1 B3 C1 V3]; [B1 B3 C1 V3] # ߡ.--⸬ +B; 𞥓.\u0718; 𞥓.\u0718; xn--of6h.xn--inb # 𞥓.ܘ +B; 𞥓.\u0718; ; xn--of6h.xn--inb # 𞥓.ܘ +B; xn--of6h.xn--inb; 𞥓.\u0718; xn--of6h.xn--inb # 𞥓.ܘ +B; 󠄽-.-\u0DCA; [V3]; [V3] # -.-් +B; 󠄽-.-\u0DCA; [V3]; [V3] # -.-් +B; -.xn----ptf; [V3]; [V3] # -.-් +B; 󠇝\u075B-.\u1927; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ݛ-.ᤧ +B; xn----k4c.xn--lff; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ݛ-.ᤧ +B; 𞤴󠆹⦉𐹺.\uA806⒌󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆⒌ +B; 𞤴󠆹⦉𐹺.\uA8065.󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆5. +B; 𞤒󠆹⦉𐹺.\uA8065.󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆5. 
+B; xn--fuix729epewf.xn--5-w93e.xn--7b83e; [B1 V5 V6]; [B1 V5 V6] # 𞤴⦉𐹺.꠆5. +B; 𞤒󠆹⦉𐹺.\uA806⒌󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆⒌ +B; xn--fuix729epewf.xn--xsh5029b6e77i; [B1 V5 V6]; [B1 V5 V6] # 𞤴⦉𐹺.꠆⒌ +T; 󠄸₀。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 V5] # 0.𑖿𐦂 +N; 󠄸₀。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 +T; 󠄸0。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 V5] # 0.𑖿𐦂 +N; 󠄸0。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 +B; 0.xn--mn9cz2s; [B1 V5]; [B1 V5] +B; 0.xn--0ugc8040p9hk; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 +B; Ⴚ𐋸󠄄。𝟝ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် +B; Ⴚ𐋸󠄄。5ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် +B; ⴚ𐋸󠄄。5ퟶ\u103A; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် +B; xn--ilj2659d.xn--5-dug9054m; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် +B; ⴚ𐋸.5ퟶ\u103A; ; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် +B; Ⴚ𐋸.5ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် +B; xn--ynd2415j.xn--5-dug9054m; [V6]; [V6] # Ⴚ𐋸.5ퟶ် +B; ⴚ𐋸󠄄。𝟝ퟶ\u103A; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် +T; \u200D-ᠹ﹪.\u1DE1\u1922; [C2 P1 V5 V6]; [P1 V3 V5 V6] # -ᠹ﹪.ᷡᤢ +N; \u200D-ᠹ﹪.\u1DE1\u1922; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ﹪.ᷡᤢ +T; \u200D-ᠹ%.\u1DE1\u1922; [C2 P1 V5 V6]; [P1 V3 V5 V6] # -ᠹ%.ᷡᤢ +N; \u200D-ᠹ%.\u1DE1\u1922; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ%.ᷡᤢ +B; xn---%-u4o.xn--gff52t; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᠹ%.ᷡᤢ +B; xn---%-u4oy48b.xn--gff52t; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ%.ᷡᤢ +B; xn----c6jx047j.xn--gff52t; [V3 V5 V6]; [V3 V5 V6] # -ᠹ﹪.ᷡᤢ +B; xn----c6j614b1z4v.xn--gff52t; [C2 V5 V6]; [C2 V5 V6] # -ᠹ﹪.ᷡᤢ +B; ≠.ᠿ; [P1 V6]; [P1 V6] +B; =\u0338.ᠿ; [P1 V6]; [P1 V6] +B; xn--1ch.xn--y7e; [V6]; [V6] +B; \u0723\u05A3。㌪; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ +B; \u0723\u05A3。ハイツ; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ +B; xn--ucb18e.xn--eck4c5a; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ +B; \u0723\u05A3.ハイツ; ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ +B; 𞷥󠆀≮.\u2D7F-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] # ≮.⵿- +B; 𞷥󠆀<\u0338.\u2D7F-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] # ≮.⵿- +B; xn--gdhx802p.xn----i2s; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] # ≮.⵿- +B; ₆榎򦖎\u0D4D。𞤅\u06ED\uFC5A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي +B; 6榎򦖎\u0D4D。𞤅\u06ED\u064A\u064A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي +B; 6榎򦖎\u0D4D。𞤧\u06ED\u064A\u064A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي +B; xn--6-kmf4691ejv41j.xn--mhba10ch545mn8v8h; [B1 B3 V6]; [B1 B3 V6] # 6榎്.𞤧ۭيي +B; ₆榎򦖎\u0D4D。𞤧\u06ED\uFC5A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي +B; 𣩫.򌑲; [P1 V6]; [P1 V6] +B; 𣩫.򌑲; [P1 V6]; [P1 V6] +B; xn--td3j.xn--4628b; [V6]; [V6] +T; \u200D︒。\u06B9\u200C; [B1 B3 C1 C2 P1 V6]; [B1 P1 V6] # ︒.ڹ +N; \u200D︒。\u06B9\u200C; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # ︒.ڹ +B; xn--y86c.xn--skb; [B1 V6]; [B1 V6] # ︒.ڹ +B; xn--1ug2658f.xn--skb080k; [B1 B3 C1 C2 V6]; [B1 B3 C1 C2 V6] # ︒.ڹ +B; xn--skb; \u06B9; xn--skb # ڹ +B; \u06B9; ; xn--skb # ڹ +T; 𐹦\u200C𐹶。\u206D; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹦𐹶. +N; 𐹦\u200C𐹶。\u206D; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹦𐹶. +B; xn--eo0d6a.xn--sxg; [B1 V6]; [B1 V6] # 𐹦𐹶. +B; xn--0ug4994goba.xn--sxg; [B1 C1 V6]; [B1 C1 V6] # 𐹦𐹶. +B; \u0C4D𝨾\u05A9𝟭。-𑜨; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 +B; \u0C4D𝨾\u05A91。-𑜨; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 +B; xn--1-rfc312cdp45c.xn----nq0j; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 +B; 򣿈。뙏; [P1 V6]; [P1 V6] +B; 򣿈。뙏; [P1 V6]; [P1 V6] +B; xn--ph26c.xn--281b; [V6]; [V6] +B; 񕨚󠄌󑽀ᡀ.\u08B6; [P1 V6]; [P1 V6] # ᡀ.ࢶ +B; xn--z7e98100evc01b.xn--czb; [V6]; [V6] # ᡀ.ࢶ +T; \u200D。񅁛; [C2 P1 V6]; [P1 V6 A4_2] # . +N; \u200D。񅁛; [C2 P1 V6]; [C2 P1 V6] # . +T; \u200D。񅁛; [C2 P1 V6]; [P1 V6 A4_2] # . 
+N; \u200D。񅁛; [C2 P1 V6]; [C2 P1 V6] # . +B; .xn--6x4u; [V6 A4_2]; [V6 A4_2] +B; xn--1ug.xn--6x4u; [C2 V6]; [C2 V6] # . +B; \u084B皥.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- +B; \u084B皥.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- +B; xn--9vb4167c.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- +B; 𐣸\u0315𐮇.⒈ꡦ; [B1 P1 V6]; [B1 P1 V6] # ̕𐮇.⒈ꡦ +B; 𐣸\u0315𐮇.1.ꡦ; [B1 P1 V6]; [B1 P1 V6] # ̕𐮇.1.ꡦ +B; xn--5sa9915kgvb.1.xn--cd9a; [B1 V6]; [B1 V6] # ̕𐮇.1.ꡦ +B; xn--5sa9915kgvb.xn--tshw539b; [B1 V6]; [B1 V6] # ̕𐮇.⒈ꡦ +T; Ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 +N; ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 +T; ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 +N; ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\u1160Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\u1160Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\u1160A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\u1160A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb597c.xn--yda594fdn5q; [B5 B6 V6]; [B5 B6 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb597cdmmfa.xn--yda594fdn5q; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb323r.xn--yda594fdn5q; [B5 B6 V6]; [B5 B6 V6] # ⴛ֢.ā𐹦 +B; xn--tcb736kea974k.xn--yda594fdn5q; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ⴛ֢.ā𐹦 +T; ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 +N; ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 +T; ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 +N; ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\uFFA0Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\uFFA0Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\uFFA0A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\uFFA0A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb597c.xn--yda9741khjj; [B5 B6 V6]; [B5 B6 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb597cdmmfa.xn--yda9741khjj; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb323r.xn--yda9741khjj; [B5 B6 V6]; [B5 B6 V6] # ⴛ֢.ā𐹦 +B; xn--tcb736kea974k.xn--yda9741khjj; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ⴛ֢.ā𐹦 +T; \uFFF9\u200C。曳⾑𐋰≯; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ +N; \uFFF9\u200C。曳⾑𐋰≯; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ +T; \uFFF9\u200C。曳⾑𐋰>\u0338; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ +N; \uFFF9\u200C。曳⾑𐋰>\u0338; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ +T; \uFFF9\u200C。曳襾𐋰≯; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ +N; \uFFF9\u200C。曳襾𐋰≯; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ +T; \uFFF9\u200C。曳襾𐋰>\u0338; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ +N; \uFFF9\u200C。曳襾𐋰>\u0338; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ +B; xn--vn7c.xn--hdh501y8wvfs5h; [V6]; [V6] 
# .曳襾𐋰≯ +B; xn--0ug2139f.xn--hdh501y8wvfs5h; [C1 V6]; [C1 V6] # .曳襾𐋰≯ +T; ≯⒈。ß; [P1 V6]; [P1 V6] +N; ≯⒈。ß; [P1 V6]; [P1 V6] +T; >\u0338⒈。ß; [P1 V6]; [P1 V6] +N; >\u0338⒈。ß; [P1 V6]; [P1 V6] +T; ≯1.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] +N; ≯1.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] +T; >\u03381.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] +N; >\u03381.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] +B; >\u03381.。SS; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ≯1.。SS; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ≯1.。ss; [P1 V6 A4_2]; [P1 V6 A4_2] +B; >\u03381.。ss; [P1 V6 A4_2]; [P1 V6 A4_2] +B; >\u03381.。Ss; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ≯1.。Ss; [P1 V6 A4_2]; [P1 V6 A4_2] +B; xn--1-ogo..ss; [V6 A4_2]; [V6 A4_2] +B; xn--1-ogo..xn--zca; [V6 A4_2]; [V6 A4_2] +B; >\u0338⒈。SS; [P1 V6]; [P1 V6] +B; ≯⒈。SS; [P1 V6]; [P1 V6] +B; ≯⒈。ss; [P1 V6]; [P1 V6] +B; >\u0338⒈。ss; [P1 V6]; [P1 V6] +B; >\u0338⒈。Ss; [P1 V6]; [P1 V6] +B; ≯⒈。Ss; [P1 V6]; [P1 V6] +B; xn--hdh84f.ss; [V6]; [V6] +B; xn--hdh84f.xn--zca; [V6]; [V6] +T; \u0667\u200D\uFB96。\u07DA-₆Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 P1 V6] # ٧ڳ.ߚ-6Ⴙ +N; \u0667\u200D\uFB96。\u07DA-₆Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 C2 P1 V6] # ٧ڳ.ߚ-6Ⴙ +T; \u0667\u200D\u06B3。\u07DA-6Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 P1 V6] # ٧ڳ.ߚ-6Ⴙ +N; \u0667\u200D\u06B3。\u07DA-6Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 C2 P1 V6] # ٧ڳ.ߚ-6Ⴙ +T; \u0667\u200D\u06B3。\u07DA-6ⴙ; [B1 B2 B3 C2]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ +N; \u0667\u200D\u06B3。\u07DA-6ⴙ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ +B; xn--gib6m.xn---6-lve6529a; [B1 B2 B3]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ +B; xn--gib6m343e.xn---6-lve6529a; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ +B; xn--gib6m.xn---6-lve002g; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ٧ڳ.ߚ-6Ⴙ +B; xn--gib6m343e.xn---6-lve002g; [B1 B2 B3 C2 V6]; [B1 B2 B3 C2 V6] # ٧ڳ.ߚ-6Ⴙ +T; \u0667\u200D\uFB96。\u07DA-₆ⴙ; [B1 B2 B3 C2]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ +N; \u0667\u200D\uFB96。\u07DA-₆ⴙ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ +T; \u200C。≠; [C1 P1 V6]; [P1 V6 A4_2] # .≠ +N; \u200C。≠; [C1 P1 V6]; [C1 P1 V6] # .≠ +T; \u200C。=\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≠ +N; \u200C。=\u0338; [C1 P1 V6]; [C1 P1 V6] # .≠ +T; \u200C。≠; [C1 P1 V6]; [P1 V6 A4_2] # .≠ +N; \u200C。≠; [C1 P1 V6]; [C1 P1 V6] # .≠ +T; \u200C。=\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≠ +N; \u200C。=\u0338; [C1 P1 V6]; [C1 P1 V6] # .≠ +B; .xn--1ch; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--1ch; [C1 V6]; [C1 V6] # .≠ +T; 𑖿𝨔.ᡟ𑖿\u1B42\u200C; [C1 V5]; [V5] # 𑖿𝨔.ᡟ𑖿ᭂ +N; 𑖿𝨔.ᡟ𑖿\u1B42\u200C; [C1 V5]; [C1 V5] # 𑖿𝨔.ᡟ𑖿ᭂ +B; xn--461dw464a.xn--v8e29loy65a; [V5]; [V5] # 𑖿𝨔.ᡟ𑖿ᭂ +B; xn--461dw464a.xn--v8e29ldzfo952a; [C1 V5]; [C1 V5] # 𑖿𝨔.ᡟ𑖿ᭂ +T; 򔣳\u200D򑝱.𖬴Ↄ≠-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴Ↄ≠- +N; 򔣳\u200D򑝱.𖬴Ↄ≠-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴Ↄ≠- +T; 򔣳\u200D򑝱.𖬴Ↄ=\u0338-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴Ↄ≠- +N; 򔣳\u200D򑝱.𖬴Ↄ=\u0338-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴Ↄ≠- +T; 򔣳\u200D򑝱.𖬴ↄ=\u0338-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴ↄ≠- +N; 򔣳\u200D򑝱.𖬴ↄ=\u0338-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴ↄ≠- +T; 򔣳\u200D򑝱.𖬴ↄ≠-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴ↄ≠- +N; 򔣳\u200D򑝱.𖬴ↄ≠-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴ↄ≠- +B; xn--6j00chy9a.xn----81n51bt713h; [V3 V5 V6]; [V3 V5 V6] +B; xn--1ug15151gkb5a.xn----81n51bt713h; [C2 V3 V5 V6]; [C2 V3 V5 V6] # .𖬴ↄ≠- +B; xn--6j00chy9a.xn----61n81bt713h; [V3 V5 V6]; [V3 V5 V6] +B; xn--1ug15151gkb5a.xn----61n81bt713h; [C2 V3 V5 V6]; [C2 V3 V5 V6] # .𖬴Ↄ≠- +T; \u07E2ς\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢς7.蔑 +N; \u07E2ς\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢς7.蔑 +T; \u07E2ς\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢς7.蔑 +N; \u07E2ς\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢς7.蔑 +T; \u07E2Σ\u200D7。蔑򛖢; [B2 
C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 +N; \u07E2Σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 +T; \u07E2σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 +N; \u07E2σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 +B; xn--7-zmb872a.xn--wy1ao4929b; [B2 V6]; [B2 V6] # ߢσ7.蔑 +B; xn--7-zmb872aez5a.xn--wy1ao4929b; [B2 C2 V6]; [B2 C2 V6] # ߢσ7.蔑 +B; xn--7-xmb182aez5a.xn--wy1ao4929b; [B2 C2 V6]; [B2 C2 V6] # ߢς7.蔑 +T; \u07E2Σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 +N; \u07E2Σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 +T; \u07E2σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 +N; \u07E2σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 +B; 𐹰.\u0600; [B1 P1 V6]; [B1 P1 V6] # 𐹰. +B; xn--oo0d.xn--ifb; [B1 V6]; [B1 V6] # 𐹰. +B; -\u08A8.𱠖; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ࢨ. +B; xn----mod.xn--5o9n; [B1 V3 V6]; [B1 V3 V6] # -ࢨ. +B; ≯𞱸󠇀。誆⒈; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338𞱸󠇀。誆⒈; [B1 P1 V6]; [B1 P1 V6] +B; ≯𞱸󠇀。誆1.; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338𞱸󠇀。誆1.; [B1 P1 V6]; [B1 P1 V6] +B; xn--hdh7151p.xn--1-dy1d.; [B1 V6]; [B1 V6] +B; xn--hdh7151p.xn--tsh1248a; [B1 V6]; [B1 V6] +B; \u0616𞥙䐊\u0650.︒\u0645↺\u069C; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ؖ𞥙䐊ِ.︒م↺ڜ +B; \u0616𞥙䐊\u0650.。\u0645↺\u069C; [B1 V5 A4_2]; [B1 V5 A4_2] # ؖ𞥙䐊ِ..م↺ڜ +B; xn--4fb0j490qjg4x..xn--hhb8o948e; [B1 V5 A4_2]; [B1 V5 A4_2] # ؖ𞥙䐊ِ..م↺ڜ +B; xn--4fb0j490qjg4x.xn--hhb8o948euo5r; [B1 V5 V6]; [B1 V5 V6] # ؖ𞥙䐊ِ.︒م↺ڜ +T; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +N; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +T; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +N; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.xn--hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.xn--hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.XN--HMB76Q74166B; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.XN--HMB76Q74166B; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.Xn--Hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.Xn--Hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.xn--hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.xn--hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.XN--HMB76Q48Y18505A; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.XN--HMB76Q48Y18505A; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.Xn--Hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.Xn--Hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; Ⴌ.𐹾︒𑁿𞾄; [B1 P1 V6]; [B1 P1 V6] +B; Ⴌ.𐹾。𑁿𞾄; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; ⴌ.𐹾。𑁿𞾄; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; xn--3kj.xn--2o0d.xn--q30dg029a; [B1 V5 V6]; [B1 V5 V6] +B; xn--knd.xn--2o0d.xn--q30dg029a; [B1 V5 V6]; [B1 V5 V6] +B; ⴌ.𐹾︒𑁿𞾄; [B1 P1 V6]; [B1 P1 V6] +B; xn--3kj.xn--y86c030a9ob6374b; [B1 V6]; [B1 V6] +B; xn--knd.xn--y86c030a9ob6374b; [B1 V6]; [B1 V6] +B; 񧞿╏。𞩕󠁾; [B3 B6 P1 V6]; [B3 B6 P1 V6] +B; xn--iyh90030d.xn--1m6hs0260c; [B3 B6 V6]; [B3 B6 V6] +T; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [V5] # ┮.ఀ్᜴ +N; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ +T; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [V5] # ┮.ఀ్᜴ +N; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ +B; xn--kxh.xn--eoc8m432a; [V5]; [V5] # ┮.ఀ్᜴ +B; xn--1ug04r.xn--eoc8m432a40i; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ +B; 򹚪。🄂; [P1 V6]; [P1 V6] +B; 򹚪。1,; [P1 V6]; [P1 V6] +B; xn--n433d.1,; [P1 V6]; [P1 V6] +B; xn--n433d.xn--v07h; [V6]; [V6] +B; 𑍨刍.🛦; [V5]; [V5] +B; xn--rbry728b.xn--y88h; [V5]; [V5] +B; 󠌏3。\u1BF1𝟒; [P1 V5 V6]; [P1 V5 V6] # 3.ᯱ4 +B; 󠌏3。\u1BF14; [P1 V5 V6]; [P1 V5 V6] # 3.ᯱ4 +B; 
xn--3-ib31m.xn--4-pql; [V5 V6]; [V5 V6] # 3.ᯱ4 +T; \u06876Ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ڇ6Ⴔ辘.صيڇ +N; \u06876Ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ڇ6Ⴔ辘.صيڇ +T; \u06876Ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ڇ6Ⴔ辘.صيڇ +N; \u06876Ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ڇ6Ⴔ辘.صيڇ +T; \u06876ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1]; [B2 B3] # ڇ6ⴔ辘.صيڇ +N; \u06876ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1]; [B2 B3 C1] # ڇ6ⴔ辘.صيڇ +B; xn--6-gsc2270akm6f.xn--0gb6bxk; [B2 B3]; [B2 B3] # ڇ6ⴔ辘.صيڇ +B; xn--6-gsc2270akm6f.xn--0gb6bxkx18g; [B2 B3 C1]; [B2 B3 C1] # ڇ6ⴔ辘.صيڇ +B; xn--6-gsc039eqq6k.xn--0gb6bxk; [B2 B3 V6]; [B2 B3 V6] # ڇ6Ⴔ辘.صيڇ +B; xn--6-gsc039eqq6k.xn--0gb6bxkx18g; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ڇ6Ⴔ辘.صيڇ +T; \u06876ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1]; [B2 B3] # ڇ6ⴔ辘.صيڇ +N; \u06876ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1]; [B2 B3 C1] # ڇ6ⴔ辘.صيڇ +B; 󠄍.𐮭𞰬򻫞۹; [B2 P1 V6 A4_2]; [B2 P1 V6 A4_2] +B; .xn--mmb3954kd0uf1zx7f; [B2 V6 A4_2]; [B2 V6 A4_2] +B; \uA87D≯.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. +B; \uA87D>\u0338.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. +B; \uA87D≯.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. +B; \uA87D>\u0338.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. +B; xn--hdh8193c.xn--5z40cp629b; [V6]; [V6] # ≯. +T; ςო\u067B.ς\u0714; [B5 B6]; [B5 B6] # ςოٻ.ςܔ +N; ςო\u067B.ς\u0714; [B5 B6]; [B5 B6] # ςოٻ.ςܔ +B; Σო\u067B.Σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ +B; σო\u067B.σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ +B; Σო\u067B.σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ +B; xn--4xa60l26n.xn--4xa21o; [B5 B6]; [B5 B6] # σოٻ.σܔ +T; Σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ +N; Σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ +T; σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ +N; σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ +B; xn--4xa60l26n.xn--3xa41o; [B5 B6]; [B5 B6] # σოٻ.ςܔ +B; xn--3xa80l26n.xn--3xa41o; [B5 B6]; [B5 B6] # ςოٻ.ςܔ +B; 򄖚\u0748𠄯\u075F。󠛩; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ݈𠄯ݟ. +B; 򄖚\u0748𠄯\u075F。󠛩; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ݈𠄯ݟ. +B; xn--vob0c4369twfv8b.xn--kl46e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ݈𠄯ݟ. 
+T; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ +N; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ +N; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ +N; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ +N; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ +N; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ +T; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ +N; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ +B; xn--1t56e.xn--1ch153bqvw; [V6]; [V6] +B; xn--1t56e.xn--1ug73gzzpwi3a; [C2 V6]; [C2 V6] # .䤫≠ⴞ +B; xn--1t56e.xn--2nd141ghl2a; [V6]; [V6] +B; xn--1t56e.xn--2nd159e9vb743e; [C2 V6]; [C2 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ +N; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ +T; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ +N; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ +B; 𐽘𑈵.𐹣🕥; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 𐽘𑈵.𐹣🕥; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; xn--bv0d02c.xn--bo0dq650b; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; ⒊⒈𑁄。9; [P1 V6]; [P1 V6] +B; 3.1.𑁄。9; [V5]; [V5] +B; 3.1.xn--110d.9; [V5]; [V5] +B; xn--tshd3512p.9; [V6]; [V6] +T; -\u200C\u2DF1≮.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 +N; -\u200C\u2DF1≮.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 +T; -\u200C\u2DF1<\u0338.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 +N; -\u200C\u2DF1<\u0338.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 +T; -\u200C\u2DF1≮.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 +N; -\u200C\u2DF1≮.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 +T; -\u200C\u2DF1<\u0338.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 +N; -\u200C\u2DF1<\u0338.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 +B; xn----ngo823c.xn--49-ki3om2611f; [B1 V3 V6]; [B1 V3 V6] # -ⷱ≮.𐹱49 +B; xn----sgn20i14s.xn--49-ki3om2611f; [B1 C1 V3 V6]; [B1 C1 V3 V6] # -ⷱ≮.𐹱49 +B; -≯딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ +B; ->\u0338딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ +B; -≯딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ +B; ->\u0338딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ +B; xn----pgow547d.xn--5vb; [B1 V3 V6]; [B1 V3 V6] # -≯딾.ࡇ +T; 𑙢⒈𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𑙢⒈𐹠-. +N; 𑙢⒈𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𑙢⒈𐹠-. +T; 𑙢1.𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𑙢1.𐹠-. +N; 𑙢1.𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𑙢1.𐹠-. +B; xn--1-bf0j.xn----516i.xn--jd46e; [B1 V3 V6]; [B1 V3 V6] +B; xn--1-bf0j.xn----516i.xn--0ug23321l; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𑙢1.𐹠-. +B; xn----dcpy090hiyg.xn--jd46e; [B1 V3 V6]; [B1 V3 V6] +B; xn----dcpy090hiyg.xn--0ug23321l; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𑙢⒈𐹠-. 
+B; \u034A.𐨎; [V5]; [V5] # ͊.𐨎 +B; \u034A.𐨎; [V5]; [V5] # ͊.𐨎 +B; xn--oua.xn--mr9c; [V5]; [V5] # ͊.𐨎 +B; 훉≮。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ +B; 훉<\u0338。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ +B; 훉≮。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ +B; 훉<\u0338。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ +B; xn--gdh2512e.xn--i4c; [V5 V6]; [V5 V6] # 훉≮.ิ +B; \u2DF7򞣉🃘.𴈇𝟸\u0659𞤯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 +B; \u2DF7򞣉🃘.𴈇2\u0659𞤯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 +B; \u2DF7򞣉🃘.𴈇2\u0659𞤍; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 +B; xn--trj8045le6s9b.xn--2-upc23918acjsj; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ⷷ🃘.2ٙ𞤯 +B; \u2DF7򞣉🃘.𴈇𝟸\u0659𞤍; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 +T; 󗇩ßᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ßᢞ.٠نخ- +N; 󗇩ßᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ßᢞ.٠نخ- +T; 󗇩ßᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ßᢞ.٠نخ- +N; 󗇩ßᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ßᢞ.٠نخ- +T; 󗇩SSᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩SSᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +T; 󗇩ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +T; 󗇩Ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩Ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +B; xn--ss-jepz4596r.xn----dnc5e1er384z; [B1 V3 V6]; [B1 V3 V6] # ssᢞ.٠نخ- +B; xn--ss-jep006bqt765b.xn----dnc5e1er384z; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # ssᢞ.٠نخ- +B; xn--zca272jbif10059a.xn----dnc5e1er384z; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # ßᢞ.٠نخ- +T; 󗇩SSᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩SSᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +T; 󗇩ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +T; 󗇩Ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩Ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +B; ꡆ。Ↄ\u0FB5놮-; [P1 V3 V6]; [P1 V3 V6] # ꡆ.Ↄྵ놮- +B; ꡆ。Ↄ\u0FB5놮-; [P1 V3 V6]; [P1 V3 V6] # ꡆ.Ↄྵ놮- +B; ꡆ。ↄ\u0FB5놮-; [V3]; [V3] # ꡆ.ↄྵ놮- +B; ꡆ。ↄ\u0FB5놮-; [V3]; [V3] # ꡆ.ↄྵ놮- +B; xn--fc9a.xn----qmg097k469k; [V3]; [V3] # ꡆ.ↄྵ놮- +B; xn--fc9a.xn----qmg787k869k; [V3 V6]; [V3 V6] # ꡆ.Ↄྵ놮- +T; \uFDAD\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # لمي.ک +N; \uFDAD\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B3 B5 B6 C2 P1 V6] # لمي.ک +T; \u0644\u0645\u064A\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # لمي.ک +N; \u0644\u0645\u064A\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B3 B5 B6 C2 P1 V6] # لمي.ک +B; xn--ghbcp.xn--ckb36214f; [B5 B6 V6]; [B5 B6 V6] # لمي.ک +B; xn--ghbcp494x.xn--ckb36214f; [B3 B5 B6 C2 V6]; [B3 B5 B6 C2 V6] # لمي.ک +B; Ⴜ\u1C2F𐳒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; Ⴜ\u1C2F𐳒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; ⴜ\u1C2F𐳒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ +B; ⴜ\u1C2F𐳒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ +B; Ⴜ\u1C2F𐲒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; Ⴜ\u1C2F𐲒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 
B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; xn--0nd679cf3eq67y.xn--wlb646b4ng; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; xn--r1f68xh1jgv7u.xn--wlb646b4ng; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ +B; 𐋵。\uFCEC; [B1]; [B1] # 𐋵.كم +B; 𐋵。\u0643\u0645; [B1]; [B1] # 𐋵.كم +B; xn--p97c.xn--fhbe; [B1]; [B1] # 𐋵.كم +B; 𐋵.\u0643\u0645; [B1]; [B1] # 𐋵.كم +B; ≮𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ +B; <\u0338𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ +B; ≮𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ +B; <\u0338𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ +B; xn--gdh0880o.xn--4tjx101bsg00ds9pyc; [V6]; [V6] # ≮.ꫬ⹈ +B; \u2DF0\u0358ᢕ.\u0361𐹷󠴍; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⷰ͘ᢕ.͡𐹷 +B; \u2DF0\u0358ᢕ.\u0361𐹷󠴍; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⷰ͘ᢕ.͡𐹷 +B; xn--2ua889htsp.xn--cva2687k2tv0g; [B1 V5 V6]; [B1 V5 V6] # ⷰ͘ᢕ.͡𐹷 +T; \uFD79ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +N; \uFD79ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +T; \u063A\u0645\u0645ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +N; \u063A\u0645\u0645ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +B; xn--5gbwa03bg24e.xn--vdb1198k; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +B; xn--5gbwa03bg24eptk.xn--vdb1198k; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +T; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [P1 V5 V6] # 𑑂.🞕 +N; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 𑑂.🞕 +T; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [P1 V5 V6] # 𑑂.🞕 +N; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 𑑂.🞕 +B; xn--8v1d.xn--ye9h41035a2qqs; [V5 V6]; [V5 V6] +B; xn--8v1d.xn--1ug1386plvx1cd8vya; [C2 V5 V6]; [C2 V5 V6] # 𑑂.🞕 +B; -\u05E9。⒚; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ש.⒚ +B; -\u05E9。19.; [B1 V3]; [B1 V3] # -ש.19. +B; xn----gjc.19.; [B1 V3]; [B1 V3] # -ש.19. 
+B; xn----gjc.xn--cth; [B1 V3 V6]; [B1 V3 V6] # -ש.⒚ +T; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ࡅ.ᢎ +N; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ࡅ.ᢎ +T; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ࡅ.ᢎ +N; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ࡅ.ᢎ +B; xn--3vb50049s.xn--79e; [B5 B6 V6]; [B5 B6 V6] # ࡅ.ᢎ +B; xn--3vb882jz4411a.xn--79e259a; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ࡅ.ᢎ +T; ß\u09C1\u1DED。\u06208₅; ß\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 +N; ß\u09C1\u1DED。\u06208₅; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 +T; ß\u09C1\u1DED。\u062085; ß\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 +N; ß\u09C1\u1DED。\u062085; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 +B; SS\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; ss\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; Ss\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; xn--ss-e2f077r.xn--85-psd; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; ss\u09C1\u1DED.\u062085; ; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; SS\u09C1\u1DED.\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; Ss\u09C1\u1DED.\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; xn--zca266bwrr.xn--85-psd; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 +T; ß\u09C1\u1DED.\u062085; ; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 +N; ß\u09C1\u1DED.\u062085; ; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 +B; SS\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; ss\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; Ss\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +T; \u0ACD\u0484魅𝟣.₃𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +N; \u0ACD\u0484魅𝟣.₃𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +T; \u0ACD\u0484魅1.3𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +N; \u0ACD\u0484魅1.3𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +B; \u0ACD\u0484魅1.3𐹥SS; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u0ACD\u0484魅1.3𐹥ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u0ACD\u0484魅1.3𐹥Ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; xn--1-0xb049b102o.xn--3ss-nv9t; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; xn--1-0xb049b102o.xn--3-qfa7018r; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +B; \u0ACD\u0484魅𝟣.₃𐹥SS; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u0ACD\u0484魅𝟣.₃𐹥ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u0ACD\u0484魅𝟣.₃𐹥Ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u072B。𑓂⒈𑜫󠿻; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ܫ.𑓂⒈𑜫 +B; \u072B。𑓂1.𑜫󠿻; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ܫ.𑓂1.𑜫 +B; xn--1nb.xn--1-jq9i.xn--ji2dg9877c; [B1 V5 V6]; [B1 V5 V6] # ܫ.𑓂1.𑜫 +B; xn--1nb.xn--tsh7798f6rbrt828c; [B1 V5 V6]; [B1 V5 V6] # ܫ.𑓂⒈𑜫 +B; \uFE0Dછ。嵨; છ.嵨; xn--6dc.xn--tot +B; xn--6dc.xn--tot; છ.嵨; xn--6dc.xn--tot +B; છ.嵨; ; xn--6dc.xn--tot +B; Ⴔ≠Ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] +B; Ⴔ=\u0338Ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] +B; ⴔ=\u0338ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] +B; ⴔ≠ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] +B; xn--1ch603bxb.xn--do0dwa; [B1 V6]; [B1 V6] +B; xn--7md3b171g.xn--do0dwa; [B1 V6]; [B1 V6] +T; -\u200C⒙𐫥。𝨵; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -⒙𐫥.𝨵 +N; -\u200C⒙𐫥。𝨵; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -⒙𐫥.𝨵 +T; -\u200C18.𐫥。𝨵; [C1 V3 V5]; [V3 V5] # -18.𐫥.𝨵 +N; -\u200C18.𐫥。𝨵; [C1 V3 V5]; [C1 V3 V5] # -18.𐫥.𝨵 +B; -18.xn--rx9c.xn--382h; [V3 V5]; [V3 V5] +B; xn---18-9m0a.xn--rx9c.xn--382h; [C1 V3 V5]; [C1 V3 V5] # -18.𐫥.𝨵 +B; xn----ddps939g.xn--382h; 
[V3 V5 V6]; [V3 V5 V6] +B; xn----sgn18r3191a.xn--382h; [C1 V3 V5 V6]; [C1 V3 V5 V6] # -⒙𐫥.𝨵 +B; ︒.ʌᠣ-𐹽; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] +B; 。.ʌᠣ-𐹽; [B5 B6 A4_2]; [B5 B6 A4_2] +B; 。.Ʌᠣ-𐹽; [B5 B6 A4_2]; [B5 B6 A4_2] +B; ..xn----73a596nuh9t; [B5 B6 A4_2]; [B5 B6 A4_2] +B; ︒.Ʌᠣ-𐹽; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] +B; xn--y86c.xn----73a596nuh9t; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; \uFE05︒。𦀾\u1CE0; [P1 V6]; [P1 V6] # ︒.𦀾᳠ +B; \uFE05。。𦀾\u1CE0; [A4_2]; [A4_2] # ..𦀾᳠ +B; ..xn--t6f5138v; [A4_2]; [A4_2] # ..𦀾᳠ +B; xn--y86c.xn--t6f5138v; [V6]; [V6] # ︒.𦀾᳠ +B; xn--t6f5138v; 𦀾\u1CE0; xn--t6f5138v # 𦀾᳠ +B; 𦀾\u1CE0; ; xn--t6f5138v # 𦀾᳠ +T; 𞮑ß􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +N; 𞮑ß􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𞮑SS􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𞮑ss􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𞮑Ss􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--ss-o412ac6305g.xn--07e; [B2 B3 V6]; [B2 B3 V6] +B; xn--zca9432wb989f.xn--07e; [B2 B3 V6]; [B2 B3 V6] +T; \uA953\u200D\u062C\u066C。𱆎󻡟\u200C󠅆; [B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ꥓ج٬. +N; \uA953\u200D\u062C\u066C。𱆎󻡟\u200C󠅆; [B5 B6 C1 P1 V5 V6]; [B5 B6 C1 P1 V5 V6] # ꥓ج٬. +B; xn--rgb2k6711c.xn--ec8nj3948b; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ꥓ج٬. +B; xn--rgb2k500fhq9j.xn--0ug78870a5sp9d; [B5 B6 C1 V5 V6]; [B5 B6 C1 V5 V6] # ꥓ج٬. +T; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ +N; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ +T; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ +N; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ +T; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ +N; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ +T; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ +N; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ +T; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +B; xn--u836e.xn---ss-gl2a; [V3 V6]; [V3 V6] +B; xn--u836e.xn---ss-cn0at5l; [C1 V3 V6]; [C1 V3 V6] # .-ss≠ +B; xn--u836e.xn----qfa750ve7b; [C1 V3 V6]; [C1 V3 V6] # .-ß≠ +T; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ +N; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ +T; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ +N; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ +T; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ +N; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ +T; 
ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ +N; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ +B; xn--p8e.xn--1ch3a7084l; [V6]; [V6] +B; xn--p8e650b.xn--1ch3a7084l; [C1 V6]; [C1 V6] # ᡙ.≯𐋲≠ +B; 𐹧𞲄󠁭񆼩。\u034E🄀; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹧.͎🄀 +B; 𐹧𞲄󠁭񆼩。\u034E0.; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹧.͎0. +B; xn--fo0dw409aq58qrn69d.xn--0-bgb.; [B1 V5 V6]; [B1 V5 V6] # 𐹧.͎0. +B; xn--fo0dw409aq58qrn69d.xn--sua6883w; [B1 V5 V6]; [B1 V5 V6] # 𐹧.͎🄀 +T; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡς +N; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡς +T; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡς +N; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡς +T; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡς +N; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡς +T; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡσ +N; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡσ +T; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡσ +N; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡσ +B; xn--vkj.xn--4xa73ob5892c; [B2 B3 V6]; [B2 B3 V6] # ⴄ.ܡσ +B; xn--vkj.xn--4xa73o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # ⴄ.ܡσ +B; xn--cnd.xn--4xa73ob5892c; [B2 B3 V6]; [B2 B3 V6] # Ⴄ.ܡσ +B; xn--cnd.xn--4xa73o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # Ⴄ.ܡσ +B; xn--vkj.xn--3xa93o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # ⴄ.ܡς +B; xn--cnd.xn--3xa93o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # Ⴄ.ܡς +T; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡς +N; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡς +T; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡσ +N; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡσ +T; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡσ +N; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡσ +B; 򮵛\u0613.Ⴕ; [P1 V6]; [P1 V6] # ؓ.Ⴕ +B; 򮵛\u0613.ⴕ; [P1 V6]; [P1 V6] # ؓ.ⴕ +B; xn--1fb94204l.xn--dlj; [V6]; [V6] # ؓ.ⴕ +B; xn--1fb94204l.xn--tnd; [V6]; [V6] # ؓ.Ⴕ +T; ≯\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ +N; ≯\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ +T; >\u0338\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ +N; >\u0338\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ +T; >\u0338\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ +N; >\u0338\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ +T; ≯\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ +N; ≯\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ +B; xn--ofg13qyr21c.xn--0f9au6706d; [B1 V5 V6]; [B1 V5 V6] # ≯ᷳ𞤥.꣄ +B; xn--ofg13qyr21c.xn--0ugc0116hix29k; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ≯ᷳ𞤥.꣄ +T; \u200C󠄷。򒑁; [C1 P1 V6]; [P1 V6 A4_2] # . +N; \u200C󠄷。򒑁; [C1 P1 V6]; [C1 P1 V6] # . +T; \u200C󠄷。򒑁; [C1 P1 V6]; [P1 V6 A4_2] # . +N; \u200C󠄷。򒑁; [C1 P1 V6]; [C1 P1 V6] # . +B; .xn--w720c; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--w720c; [C1 V6]; [C1 V6] # . 
+T; ⒈\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V6]; [P1 V6] # ⒈ූ焅.ꡟ +N; ⒈\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V6]; [C2 P1 V6] # ⒈ූ焅.ꡟ +T; 1.\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V5 V6]; [P1 V5 V6] # 1.ූ焅.ꡟ +N; 1.\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 1.ූ焅.ꡟ +B; 1.xn--t1c6981c.xn--4c9a21133d; [V5 V6]; [V5 V6] # 1.ූ焅.ꡟ +B; 1.xn--t1c6981c.xn--1ugz184c9lw7i; [C2 V5 V6]; [C2 V5 V6] # 1.ූ焅.ꡟ +B; xn--t1c337io97c.xn--4c9a21133d; [V6]; [V6] # ⒈ූ焅.ꡟ +B; xn--t1c337io97c.xn--1ugz184c9lw7i; [C2 V6]; [C2 V6] # ⒈ූ焅.ꡟ +T; \u1DCDς≮.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +N; \u1DCDς≮.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +T; \u1DCDς<\u0338.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +N; \u1DCDς<\u0338.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +T; \u1DCDς<\u0338.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +N; \u1DCDς<\u0338.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +T; \u1DCDς≮.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +N; \u1DCDς≮.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +B; \u1DCDΣ≮.Σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDΣ<\u0338.Σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDσ<\u0338.σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDσ≮.σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDΣ≮.Σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDΣ<\u0338.Σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; xn--4xa544kvid.xn--0-zmb55727aggma; [B1 B5 V5 V6]; [B1 B5 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; xn--3xa744kvid.xn--0-xmb85727aggma; [B1 B5 V5 V6]; [B1 B5 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +B; \u1DCDσ≮.σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDσ<\u0338.σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +T; 򢦾ß\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ßֹ𐫙.֭ࢡ +N; 򢦾ß\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ßֹ𐫙.֭ࢡ +B; 򢦾SS\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ +B; 򢦾ss\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ +B; 򢦾Ss\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ +B; xn--ss-xjd6058xlz50g.xn--4cb62m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ssֹ𐫙.֭ࢡ +B; xn--zca89v339zj118e.xn--4cb62m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ßֹ𐫙.֭ࢡ +B; -𞣄。⒈; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -𞣄。1.; [B1 V3]; [B1 V3] +B; xn----xc8r.1.; [B1 V3]; [B1 V3] +B; xn----xc8r.xn--tsh; [B1 V3 V6]; [B1 V3 V6] +B; 񈠢𐫖𝟡。\u063E𑘿; [B5 P1 V6]; [B5 P1 V6] # 𐫖9.ؾ𑘿 +B; 񈠢𐫖9。\u063E𑘿; [B5 P1 V6]; [B5 P1 V6] # 𐫖9.ؾ𑘿 +B; xn--9-el5iv442t.xn--9gb0830l; [B5 V6]; [B5 V6] # 𐫖9.ؾ𑘿 +T; \u0668\uFC8C\u0668\u1A5D.\u200D; [B1 C2]; [B1] # ٨نم٨ᩝ. +N; \u0668\uFC8C\u0668\u1A5D.\u200D; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. +T; \u0668\u0646\u0645\u0668\u1A5D.\u200D; [B1 C2]; [B1] # ٨نم٨ᩝ. +N; \u0668\u0646\u0645\u0668\u1A5D.\u200D; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. +B; xn--hhbb5hc956w.; [B1]; [B1] # ٨نم٨ᩝ. +B; xn--hhbb5hc956w.xn--1ug; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. +B; 𝟘.Ⴇ󀳑\uFD50񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.Ⴇتجم +B; 0.Ⴇ󀳑\u062A\u062C\u0645񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.Ⴇتجم +B; 0.ⴇ󀳑\u062A\u062C\u0645񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.ⴇتجم +B; 0.xn--pgbe9ez79qd207lvff8b; [B1 B5 V6]; [B1 B5 V6] # 0.ⴇتجم +B; 0.xn--pgbe9e344c2725svff8b; [B1 B5 V6]; [B1 B5 V6] # 0.Ⴇتجم +B; 𝟘.ⴇ󀳑\uFD50񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.ⴇتجم +B; 𑇀▍.⁞ᠰ; [V5]; [V5] +B; xn--9zh3057f.xn--j7e103b; [V5]; [V5] +T; \u200D-\u067A.򏯩; [B1 C2 P1 V6]; [B1 P1 V3 V6] # -ٺ. +N; \u200D-\u067A.򏯩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # -ٺ. +B; xn----qrc.xn--ts49b; [B1 V3 V6]; [B1 V3 V6] # -ٺ. 
+B; xn----qrc357q.xn--ts49b; [B1 C2 V6]; [B1 C2 V6] # -ٺ. +T; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +N; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +T; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +N; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +T; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +N; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +T; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +N; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +B; xn--46e6675axzzhota.xn--hdh99p; [B1 B5 V6]; [B1 B5 V6] +B; xn--46e6675axzzhota.xn--0ug06gu8f; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ᠢ𐮂𐫘寐.≯✳ +T; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ႺႴ +N; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ႺႴ +T; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ႺႴ +N; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ႺႴ +T; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴚⴔ +N; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ⴚⴔ +T; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴚⴔ +N; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .Ⴚⴔ +B; .xn--ynd036lq981an3r4h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--1ug.xn--ynd036lq981an3r4h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .Ⴚⴔ +B; .xn--cljl81825an3r4h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--1ug.xn--cljl81825an3r4h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .ⴚⴔ +B; .xn--sndl01647an3h1h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--1ug.xn--sndl01647an3h1h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .ႺႴ +T; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴚⴔ +N; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ⴚⴔ +T; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴚⴔ +N; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .Ⴚⴔ +T; -3.\u200Dヌᢕ; [C2 V3]; [V3] # -3.ヌᢕ +N; -3.\u200Dヌᢕ; [C2 V3]; [C2 V3] # -3.ヌᢕ +B; -3.xn--fbf115j; [V3]; [V3] +B; -3.xn--fbf739aq5o; [C2 V3]; [C2 V3] # -3.ヌᢕ +T; 🂃\u0666ß\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ß.- +N; 🂃\u0666ß\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ß.- +T; 🂃\u0666SS\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- +N; 🂃\u0666SS\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- +T; 🂃\u0666ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- +N; 🂃\u0666ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- +T; 🂃\u0666Ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- +N; 🂃\u0666Ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- +B; xn--ss-pyd98921c.xn----nz8rh7531csznt; [B1 V3 V6]; [B1 V3 V6] # 🂃٦ss.- +B; xn--ss-pyd483x5k99b.xn----nz8rh7531csznt; [B1 C2 V3 V6]; [B1 C2 V3 V6] # 🂃٦ss.- +B; xn--zca34z68yzu83b.xn----nz8rh7531csznt; [B1 C2 V3 V6]; [B1 C2 V3 V6] # 🂃٦ß.- +T; ꇟ-𐾺\u069F。򰀺\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꇟ-ڟ. +N; ꇟ-𐾺\u069F。򰀺\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꇟ-ڟ. +B; xn----utc4430jd3zd.xn--bp20d; [B5 B6 V6]; [B5 B6 V6] # ꇟ-ڟ. +B; xn----utc4430jd3zd.xn--0ugx6670i; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ꇟ-ڟ. 
+B; \u0665.\u0484𐨗𝩋𴤃; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ٥.҄𐨗𝩋 +B; xn--eib.xn--n3a0405kus8eft5l; [B1 V5 V6]; [B1 V5 V6] # ٥.҄𐨗𝩋 +B; -.񱼓\u0649𐨿; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -.ى𐨿 +B; -.xn--lhb4124khbq4b; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # -.ى𐨿 +T; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] +N; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] +T; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] +N; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨Σ.𞶙녫SS; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨Σ.𞶙녫SS; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨σ.𞶙녫ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨σ.𞶙녫ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨Σ.𞶙녫Ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨Σ.𞶙녫Ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--4xa76659r.xn--ss-d64i8755h; [B2 B3 V6]; [B2 B3 V6] +B; xn--3xa96659r.xn--zca5051g4h4i; [B2 B3 V6]; [B2 B3 V6] +T; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ +N; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ +T; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ +N; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ +T; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ +N; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ +T; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ +N; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ +T; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ +N; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ +T; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ +N; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ +B; xn--u4e969b.xn--1ch; [V6]; [V6] # ⅎ្.≠ +B; xn--u4e823bq1a.xn--0ugb89o; [C1 C2 V6]; [C1 C2 V6] # ⅎ្.≠ +B; xn--u4e319b.xn--1ch; [V6]; [V6] # Ⅎ្.≠ +B; xn--u4e823bcza.xn--0ugb89o; [C1 C2 V6]; [C1 C2 V6] # Ⅎ្.≠ +T; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ +N; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ +T; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ +N; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ +T; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 P1 V5 V6] # 𐋺꫶꥓.᜔ڏ +N; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐋺꫶꥓.᜔ڏ +T; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 P1 V5 V6] # 𐋺꫶꥓.᜔ڏ +N; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐋺꫶꥓.᜔ڏ +B; xn--3j9a14ak27osbz2o.xn--ljb175f; [B1 V5 V6]; [B1 V5 V6] # 𐋺꫶꥓.᜔ڏ +B; xn--3j9a14ak27osbz2o.xn--ljb175f1wg; [B1 C1 V6]; [B1 C1 V6] # 𐋺꫶꥓.᜔ڏ +B; 񺔯\u0FA8.≯; [P1 V6]; [P1 V6] # ྨ.≯ +B; 񺔯\u0FA8.>\u0338; [P1 V6]; [P1 V6] # ྨ.≯ +B; 񺔯\u0FA8.≯; [P1 V6]; [P1 V6] # ྨ.≯ +B; 񺔯\u0FA8.>\u0338; [P1 V6]; [P1 V6] # ྨ.≯ +B; xn--4fd57150h.xn--hdh; [V6]; [V6] # ྨ.≯ +T; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # 𞡄Ⴓ.𐇽 +N; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𞡄Ⴓ.𐇽 +T; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # 𞡄Ⴓ.𐇽 +N; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𞡄Ⴓ.𐇽 +T; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B2 B3 B6 V5] # 𞡄ⴓ.𐇽 +N; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 𞡄ⴓ.𐇽 +B; xn--blj7492l.xn--m27c; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] +B; xn--1ugz52c4i16a.xn--m27c; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 𞡄ⴓ.𐇽 +B; xn--rnd5552v.xn--m27c; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] +B; xn--rnd379ex885a.xn--m27c; [B1 B3 B6 C2 V5 V6]; [B1 B3 B6 C2 V5 V6] # 𞡄Ⴓ.𐇽 +T; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B2 B3 B6 V5] # 𞡄ⴓ.𐇽 +N; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 
𞡄ⴓ.𐇽 +T; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +N; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +T; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +N; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +B; 𐪒SS\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; 𐪒ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; 𐪒Ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; xn--ss-tu9hw933a.xn--08e; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; xn--zca2517f2hvc.xn--08e; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +B; 𐪒SS\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; 𐪒ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; 𐪒Ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +T; 𐨿󠆌鸮𑚶.ς; [V5]; [V5] +N; 𐨿󠆌鸮𑚶.ς; [V5]; [V5] +B; 𐨿󠆌鸮𑚶.Σ; [V5]; [V5] +B; 𐨿󠆌鸮𑚶.σ; [V5]; [V5] +B; xn--l76a726rt2h.xn--4xa; [V5]; [V5] +B; xn--l76a726rt2h.xn--3xa; [V5]; [V5] +B; ⒗𞤬。-𑚶; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; 16.𞤬。-𑚶; [B1 V3]; [B1 V3] +B; 16.𞤊。-𑚶; [B1 V3]; [B1 V3] +B; 16.xn--ke6h.xn----4j0j; [B1 V3]; [B1 V3] +B; ⒗𞤊。-𑚶; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn--8shw466n.xn----4j0j; [B1 V3 V6]; [B1 V3 V6] +B; \u08B3𞤿⾫。𐹣\u068F⒈; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ࢳ𞤿隹.𐹣ڏ⒈ +B; \u08B3𞤿隹。𐹣\u068F1.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. +B; \u08B3𞤝隹。𐹣\u068F1.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. +B; xn--8yb0383efiwk.xn--1-wsc3373r.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. +B; \u08B3𞤝⾫。𐹣\u068F⒈; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ࢳ𞤿隹.𐹣ڏ⒈ +B; xn--8yb0383efiwk.xn--ljb064mol4n; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ࢳ𞤿隹.𐹣ڏ⒈ +B; \u2433𚎛𝟧\u0661.ᡢ8\u0F72\u0600; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 5١.ᡢ8ི +B; \u2433𚎛5\u0661.ᡢ8\u0F72\u0600; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 5١.ᡢ8ི +B; xn--5-bqc410un435a.xn--8-rkc763epjj; [B5 B6 V6]; [B5 B6 V6] # 5١.ᡢ8ི +B; 𐹠.🄀⒒-󨰈; [B1 P1 V6]; [B1 P1 V6] +B; 𐹠.0.11.-󨰈; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn--7n0d.0.11.xn----8j07m; [B1 V3 V6]; [B1 V3 V6] +B; xn--7n0d.xn----xcp9757q1s13g; [B1 V6]; [B1 V6] +T; ς-。\u200C𝟭-; [C1 V3]; [V3] # ς-.1- +N; ς-。\u200C𝟭-; [C1 V3]; [C1 V3] # ς-.1- +T; ς-。\u200C1-; [C1 V3]; [V3] # ς-.1- +N; ς-。\u200C1-; [C1 V3]; [C1 V3] # ς-.1- +T; Σ-。\u200C1-; [C1 V3]; [V3] # σ-.1- +N; Σ-。\u200C1-; [C1 V3]; [C1 V3] # σ-.1- +T; σ-。\u200C1-; [C1 V3]; [V3] # σ-.1- +N; σ-。\u200C1-; [C1 V3]; [C1 V3] # σ-.1- +B; xn----zmb.1-; [V3]; [V3] +B; xn----zmb.xn--1--i1t; [C1 V3]; [C1 V3] # σ-.1- +B; xn----xmb.xn--1--i1t; [C1 V3]; [C1 V3] # ς-.1- +T; Σ-。\u200C𝟭-; [C1 V3]; [V3] # σ-.1- +N; Σ-。\u200C𝟭-; [C1 V3]; [C1 V3] # σ-.1- +T; σ-。\u200C𝟭-; [C1 V3]; [V3] # σ-.1- +N; σ-。\u200C𝟭-; [C1 V3]; [C1 V3] # σ-.1- +B; \u1734-\u0CE2.󠄩Ⴄ; [P1 V5 V6]; [P1 V5 V6] # ᜴-ೢ.Ⴄ +B; \u1734-\u0CE2.󠄩Ⴄ; [P1 V5 V6]; [P1 V5 V6] # ᜴-ೢ.Ⴄ +B; \u1734-\u0CE2.󠄩ⴄ; [V5]; [V5] # ᜴-ೢ.ⴄ +B; xn----ggf830f.xn--vkj; [V5]; [V5] # ᜴-ೢ.ⴄ +B; xn----ggf830f.xn--cnd; [V5 V6]; [V5 V6] # ᜴-ೢ.Ⴄ +B; \u1734-\u0CE2.󠄩ⴄ; [V5]; [V5] # ᜴-ೢ.ⴄ +B; 򭈗♋\u06BB𐦥。\u0954⒈; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ♋ڻ𐦥.॔⒈ +B; 򭈗♋\u06BB𐦥。\u09541.; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ♋ڻ𐦥.॔1. +B; xn--ukb372n129m3rs7f.xn--1-fyd.; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ♋ڻ𐦥.॔1. +B; xn--ukb372n129m3rs7f.xn--u3b240l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ♋ڻ𐦥.॔⒈ +T; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ +N; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ +T; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ +N; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ +B; xn--vcb.xn--0kb623h; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ +B; xn--vcb.xn--0kb623hm1d; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ +B; 񢭏\u0846≮\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. +B; 񢭏\u0846<\u0338\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. 
+B; 񢭏\u0846≮\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. +B; 񢭏\u0846<\u0338\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. +B; xn--4vb80kq29ayo62l.xn--8g6h; [B5 B6 V6]; [B5 B6 V6] # ࡆ≮્. +T; \u200D。𞀘⒈ꡍ擉; [C2 P1 V5 V6]; [P1 V5 V6 A4_2] # .𞀘⒈ꡍ擉 +N; \u200D。𞀘⒈ꡍ擉; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .𞀘⒈ꡍ擉 +T; \u200D。𞀘1.ꡍ擉; [C2 V5]; [V5 A4_2] # .𞀘1.ꡍ擉 +N; \u200D。𞀘1.ꡍ擉; [C2 V5]; [C2 V5] # .𞀘1.ꡍ擉 +B; .xn--1-1p4r.xn--s7uv61m; [V5 A4_2]; [V5 A4_2] +B; xn--1ug.xn--1-1p4r.xn--s7uv61m; [C2 V5]; [C2 V5] # .𞀘1.ꡍ擉 +B; .xn--tsh026uql4bew9p; [V5 V6 A4_2]; [V5 V6 A4_2] +B; xn--1ug.xn--tsh026uql4bew9p; [C2 V5 V6]; [C2 V5 V6] # .𞀘⒈ꡍ擉 +B; ₈\u07CB.\uFB64≠; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ +B; ₈\u07CB.\uFB64=\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ +B; 8\u07CB.\u067F≠; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ +B; 8\u07CB.\u067F=\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ +B; xn--8-zbd.xn--4ib883l; [B1 B3 V6]; [B1 B3 V6] # 8ߋ.ٿ≠ +B; ᢡ\u07DE򹐣.⒒\u0642𑍦; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ᢡߞ.⒒ق𑍦 +B; ᢡ\u07DE򹐣.11.\u0642𑍦; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ᢡߞ.11.ق𑍦 +B; xn--5sb596fi873t.11.xn--ehb4198k; [B1 B5 V6]; [B1 B5 V6] # ᢡߞ.11.ق𑍦 +B; xn--5sb596fi873t.xn--ehb336mvy7n; [B1 B5 V6]; [B1 B5 V6] # ᢡߞ.⒒ق𑍦 +B; \u0E48-𐹺𝟜.\u0363\u06E1⒏; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ่-𐹺4.ͣۡ⒏ +B; \u0E48-𐹺4.\u0363\u06E18.; [B1 V5]; [B1 V5] # ่-𐹺4.ͣۡ8. +B; xn---4-owiz479s.xn--8-ihb69x.; [B1 V5]; [B1 V5] # ่-𐹺4.ͣۡ8. +B; xn---4-owiz479s.xn--eva20pjv9a; [B1 V5 V6]; [B1 V5 V6] # ่-𐹺4.ͣۡ⒏ +B; ⫐。Ⴠ-󃐢; [P1 V6]; [P1 V6] +B; ⫐。Ⴠ-󃐢; [P1 V6]; [P1 V6] +B; ⫐。ⴠ-󃐢; [P1 V6]; [P1 V6] +B; xn--r3i.xn----2wst7439i; [V6]; [V6] +B; xn--r3i.xn----z1g58579u; [V6]; [V6] +B; ⫐。ⴠ-󃐢; [P1 V6]; [P1 V6] +B; 𑑂◊.⦟∠; [V5]; [V5] +B; 𑑂◊.⦟∠; [V5]; [V5] +B; xn--01h3338f.xn--79g270a; [V5]; [V5] +B; 𿌰-\u0662。󋸛ꡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -٢.ꡂ +B; xn----dqc20828e.xn--bc9an2879c; [B5 B6 V6]; [B5 B6 V6] # -٢.ꡂ +B; \u0678。󠏬\u0741𞪭𐹪; [B1 P1 V6]; [B1 P1 V6] # يٴ.݁𐹪 +B; \u064A\u0674。󠏬\u0741𞪭𐹪; [B1 P1 V6]; [B1 P1 V6] # يٴ.݁𐹪 +B; xn--mhb8f.xn--oob2585kfdsfsbo7h; [B1 V6]; [B1 V6] # يٴ.݁𐹪 +T; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B2 B3] # 𐫆ꌄ.ᣬ +N; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ +T; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B2 B3] # 𐫆ꌄ.ᣬ +N; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ +B; xn--y77ao18q.xn--wdf; [B2 B3]; [B2 B3] +B; xn--y77ao18q.xn--wdf367a; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ +B; ₀\u0662。󅪞≯-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- +B; ₀\u0662。󅪞>\u0338-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- +B; 0\u0662。󅪞≯-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- +B; 0\u0662。󅪞>\u0338-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- +B; xn--0-dqc.xn----ogov3342l; [B1 B6 V3 V6]; [B1 B6 V3 V6] # 0٢.≯- +B; \u031C𐹫-𞯃.𐋤\u0845; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ̜𐹫-.𐋤ࡅ +B; xn----gdb7046r692g.xn--3vb1349j; [B1 V5 V6]; [B1 V5 V6] # ̜𐹫-.𐋤ࡅ +B; ≠。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; =\u0338。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; ≠。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; =\u0338。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; =\u0338。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; ≠。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; xn--1ch.xn--fcb363rk03mypug; [B1 V5 V6]; [B1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; xn--1ch.xn--fcb538c649rypog; [B1 V5 V6]; [B1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; =\u0338。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; ≠。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; 𖫳≠.Ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] +B; 𖫳=\u0338.Ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] +B; 𖫳=\u0338.ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] +B; 𖫳≠.ⴀ𐮀; [B1 B5 B6 
P1 V5 V6]; [B1 B5 B6 P1 V5 V6] +B; xn--1ch9250k.xn--rkj6232e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--1ch9250k.xn--7md2659j; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; 󠅾\u0736\u0726.ᢚ閪\u08E2𝩟; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ܶܦ.ᢚ閪𝩟 +B; 󠅾\u0736\u0726.ᢚ閪\u08E2𝩟; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ܶܦ.ᢚ閪𝩟 +B; xn--wnb5a.xn--l0b161fis8gbp5m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ܶܦ.ᢚ閪𝩟 +T; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ +N; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ +T; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ +N; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ +B; xn--blb8114f.xn----gmg236cj6k; [B1 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ +B; xn--blb540ke10h.xn----gmg236cj6k; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ +B; 헁󘖙\u0E3A󚍚。\u06BA𝟜; [P1 V6]; [P1 V6] # 헁ฺ.ں4 +B; 헁󘖙\u0E3A󚍚。\u06BA𝟜; [P1 V6]; [P1 V6] # 헁ฺ.ں4 +B; 헁󘖙\u0E3A󚍚。\u06BA4; [P1 V6]; [P1 V6] # 헁ฺ.ں4 +B; 헁󘖙\u0E3A󚍚。\u06BA4; [P1 V6]; [P1 V6] # 헁ฺ.ں4 +B; xn--o4c1723h8g85gt4ya.xn--4-dvc; [V6]; [V6] # 헁ฺ.ں4 +T; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.Ⴞ +N; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.Ⴞ +T; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.Ⴞ +N; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.Ⴞ +T; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.ⴞ +N; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.ⴞ +B; xn--lo0d.xn--mljx1099g; [B1 V6]; [B1 V6] +B; xn--lo0d.xn--0ugx72cwi33v; [B1 C1 V6]; [B1 C1 V6] # 𐹭.ⴞ +B; xn--lo0d.xn--2nd75260n; [B1 V6]; [B1 V6] +B; xn--lo0d.xn--2nd949eqw95u; [B1 C1 V6]; [B1 C1 V6] # 𐹭.Ⴞ +T; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.ⴞ +N; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.ⴞ +B; \uA953.\u033D𑂽馋; [P1 V5 V6]; [P1 V5 V6] # ꥓.̽馋 +B; xn--3j9a.xn--bua0708eqzrd; [V5 V6]; [V5 V6] # ꥓.̽馋 +T; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [P1 V6] # .䜖 +N; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [C2 P1 V6] # .䜖 +T; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [P1 V6] # .䜖 +N; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [C2 P1 V6] # .䜖 +B; xn--g138cxw05a.xn--k0o; [V6]; [V6] +B; xn--1ug30527h9mxi.xn--k0o; [C2 V6]; [C2 V6] # .䜖 +T; ᡯ⚉姶🄉.۷\u200D🎪\u200D; [C2 P1 V6]; [P1 V6] # ᡯ⚉姶🄉.۷🎪 +N; ᡯ⚉姶🄉.۷\u200D🎪\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶🄉.۷🎪 +T; ᡯ⚉姶8,.۷\u200D🎪\u200D; [C2 P1 V6]; [P1 V6] # ᡯ⚉姶8,.۷🎪 +N; ᡯ⚉姶8,.۷\u200D🎪\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶8,.۷🎪 +B; xn--8,-g9oy26fzu4d.xn--kmb6733w; [P1 V6]; [P1 V6] +B; xn--8,-g9oy26fzu4d.xn--kmb859ja94998b; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶8,.۷🎪 +B; xn--c9e433epi4b3j20a.xn--kmb6733w; [V6]; [V6] +B; xn--c9e433epi4b3j20a.xn--kmb859ja94998b; [C2 V6]; [C2 V6] # ᡯ⚉姶🄉.۷🎪 +B; 𞽀.𐹸🚖\u0E3A; [B1 P1 V6]; [B1 P1 V6] # .𐹸🚖ฺ +B; xn--0n7h.xn--o4c9032klszf; [B1 V6]; [B1 V6] # .𐹸🚖ฺ +B; Ⴔᠵ。𐹧\u0747۹; [B1 P1 V6]; [B1 P1 V6] # Ⴔᠵ.𐹧݇۹ +B; Ⴔᠵ。𐹧\u0747۹; [B1 P1 V6]; [B1 P1 V6] # Ⴔᠵ.𐹧݇۹ +B; ⴔᠵ。𐹧\u0747۹; [B1]; [B1] # ⴔᠵ.𐹧݇۹ +B; xn--o7e997h.xn--mmb9ml895e; [B1]; [B1] # ⴔᠵ.𐹧݇۹ +B; xn--snd659a.xn--mmb9ml895e; [B1 V6]; [B1 V6] # Ⴔᠵ.𐹧݇۹ +B; ⴔᠵ。𐹧\u0747۹; [B1]; [B1] # ⴔᠵ.𐹧݇۹ +T; \u135Fᡈ\u200C.︒-𖾐-; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ፟ᡈ.︒-𖾐- +N; \u135Fᡈ\u200C.︒-𖾐-; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ፟ᡈ.︒-𖾐- +T; \u135Fᡈ\u200C.。-𖾐-; [C1 V3 V5 A4_2]; [V3 V5 A4_2] # ፟ᡈ..-𖾐- +N; \u135Fᡈ\u200C.。-𖾐-; [C1 V3 V5 A4_2]; [C1 V3 V5 A4_2] # ፟ᡈ..-𖾐- +B; xn--b7d82w..xn-----pe4u; [V3 V5 A4_2]; [V3 V5 A4_2] # ፟ᡈ..-𖾐- +B; xn--b7d82wo4h..xn-----pe4u; [C1 V3 V5 A4_2]; [C1 V3 V5 A4_2] # ፟ᡈ..-𖾐- +B; xn--b7d82w.xn-----c82nz547a; [V3 V5 V6]; [V3 V5 V6] # ፟ᡈ.︒-𖾐- +B; xn--b7d82wo4h.xn-----c82nz547a; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ፟ᡈ.︒-𖾐- +T; ⒈\u0601⒖\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈⒖.ᷰߛ +N; 
⒈\u0601⒖\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ⒈⒖.ᷰߛ +T; 1.\u060115.\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6 A4_2] # 1.15..ᷰߛ +N; 1.\u060115.\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.15..ᷰߛ +B; 1.xn--15-1pd..xn--2sb914i; [B1 V5 V6 A4_2]; [B1 V5 V6 A4_2] # 1.15..ᷰߛ +B; 1.xn--15-1pd.xn--0ug.xn--2sb914i; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 1.15..ᷰߛ +B; xn--jfb347mib.xn--2sb914i; [B1 V5 V6]; [B1 V5 V6] # ⒈⒖.ᷰߛ +B; xn--jfb844kmfdwb.xn--2sb914i; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ⒈⒖.ᷰߛ +B; 𝩜。-\u0B4DႫ; [P1 V3 V5 V6]; [P1 V3 V5 V6] # 𝩜.-୍Ⴋ +B; 𝩜。-\u0B4Dⴋ; [V3 V5]; [V3 V5] # 𝩜.-୍ⴋ +B; xn--792h.xn----bse820x; [V3 V5]; [V3 V5] # 𝩜.-୍ⴋ +B; xn--792h.xn----bse632b; [V3 V5 V6]; [V3 V5 V6] # 𝩜.-୍Ⴋ +T; ßჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ßჀ.ؠ刯Ⴝ +N; ßჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ßჀ.ؠ刯Ⴝ +T; ßⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ +N; ßⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ +B; SSჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ssჀ.ؠ刯Ⴝ +B; ssⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ssⴠ.ؠ刯ⴝ +B; Ssⴠ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ssⴠ.ؠ刯Ⴝ +B; xn--ss-j81a.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ssⴠ.ؠ刯Ⴝ +B; xn--ss-j81a.xn--fgb670rovy; [B2 B3]; [B2 B3] # ssⴠ.ؠ刯ⴝ +B; xn--ss-wgk.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ssჀ.ؠ刯Ⴝ +B; xn--zca277t.xn--fgb670rovy; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ +B; xn--zca442f.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ßჀ.ؠ刯Ⴝ +B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; xn--bnd957c2pe.xn--sib102gc69k; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; xn--yxf24x4ol.xn--sib102gc69k; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; xn--bnd957cone.xn--sib102gc69k; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; \u06EC.\u08A2𐹫\u067C; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ۬.ࢢ𐹫ټ +B; xn--8lb.xn--1ib31ily45b; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ۬.ࢢ𐹫ټ +B; \u06B6\u06DF。₇\uA806; [B1]; [B1] # ڶ۟.7꠆ +B; \u06B6\u06DF。7\uA806; [B1]; [B1] # ڶ۟.7꠆ +B; xn--pkb6f.xn--7-x93e; [B1]; [B1] # ڶ۟.7꠆ +B; \u06B6\u06DF.7\uA806; [B1]; [B1] # ڶ۟.7꠆ +T; Ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # Ⴣ𐹻.𝪣≮ +N; Ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴣ𐹻.𝪣≮ +T; Ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # Ⴣ𐹻.𝪣≮ +N; Ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴣ𐹻.𝪣≮ +T; ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # ⴣ𐹻.𝪣≮ +N; ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ⴣ𐹻.𝪣≮ +T; ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # ⴣ𐹻.𝪣≮ +N; ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ⴣ𐹻.𝪣≮ +B; xn--rlj6323e.xn--gdh4944ob3x3e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--rlj6323e.xn--0ugy6gn120eb103g; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # ⴣ𐹻.𝪣≮ +B; xn--7nd8101k.xn--gdh4944ob3x3e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--7nd8101k.xn--0ugy6gn120eb103g; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 
Ⴣ𐹻.𝪣≮ +T; 𝟵隁⯮.\u180D\u200C; [C1]; xn--9-mfs8024b. # 9隁⯮. +N; 𝟵隁⯮.\u180D\u200C; [C1]; [C1] # 9隁⯮. +T; 9隁⯮.\u180D\u200C; [C1]; xn--9-mfs8024b. # 9隁⯮. +N; 9隁⯮.\u180D\u200C; [C1]; [C1] # 9隁⯮. +B; xn--9-mfs8024b.; 9隁⯮.; xn--9-mfs8024b.; NV8 +B; 9隁⯮.; ; xn--9-mfs8024b.; NV8 +B; xn--9-mfs8024b.xn--0ug; [C1]; [C1] # 9隁⯮. +B; ⒏𐹧。Ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # ⒏𐹧.Ⴣ྄彦 +B; 8.𐹧。Ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # 8.𐹧.Ⴣ྄彦 +B; 8.𐹧。ⴣ\u0F84彦; [B1]; [B1] # 8.𐹧.ⴣ྄彦 +B; 8.xn--fo0d.xn--3ed972m6o8a; [B1]; [B1] # 8.𐹧.ⴣ྄彦 +B; 8.xn--fo0d.xn--3ed15dt93o; [B1 V6]; [B1 V6] # 8.𐹧.Ⴣ྄彦 +B; ⒏𐹧。ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # ⒏𐹧.ⴣ྄彦 +B; xn--0sh2466f.xn--3ed972m6o8a; [B1 V6]; [B1 V6] # ⒏𐹧.ⴣ྄彦 +B; xn--0sh2466f.xn--3ed15dt93o; [B1 V6]; [B1 V6] # ⒏𐹧.Ⴣ྄彦 +B; -问񬰔⒛。\u0604-񜗉橬; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -问⒛.-橬 +B; -问񬰔20.。\u0604-񜗉橬; [B1 P1 V3 V6 A4_2]; [B1 P1 V3 V6 A4_2] # -问20..-橬 +B; xn---20-658jx1776d..xn----ykc7228efm46d; [B1 V3 V6 A4_2]; [B1 V3 V6 A4_2] # -问20..-橬 +B; xn----hdpu849bhis3e.xn----ykc7228efm46d; [B1 V3 V6]; [B1 V3 V6] # -问⒛.-橬 +T; \u1BACႬ\u200C\u0325。𝟸; [C1 P1 V5 V6]; [P1 V5 V6] # ᮬႬ̥.2 +N; \u1BACႬ\u200C\u0325。𝟸; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ᮬႬ̥.2 +T; \u1BACႬ\u200C\u0325。2; [C1 P1 V5 V6]; [P1 V5 V6] # ᮬႬ̥.2 +N; \u1BACႬ\u200C\u0325。2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ᮬႬ̥.2 +T; \u1BACⴌ\u200C\u0325。2; [C1 V5]; [V5] # ᮬⴌ̥.2 +N; \u1BACⴌ\u200C\u0325。2; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 +B; xn--mta176jjjm.2; [V5]; [V5] # ᮬⴌ̥.2 +B; xn--mta176j97cl2q.2; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 +B; xn--mta930emri.2; [V5 V6]; [V5 V6] # ᮬႬ̥.2 +B; xn--mta930emribme.2; [C1 V5 V6]; [C1 V5 V6] # ᮬႬ̥.2 +T; \u1BACⴌ\u200C\u0325。𝟸; [C1 V5]; [V5] # ᮬⴌ̥.2 +N; \u1BACⴌ\u200C\u0325。𝟸; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 +B; \uDC5F。\uA806\u0669󠒩; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ +B; \uDC5F.xn--iib9583fusy0i; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ +B; \uDC5F.XN--IIB9583FUSY0I; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ +B; \uDC5F.Xn--Iib9583fusy0i; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ +B; 󠄁\u035F⾶。₇︒눇≮; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7︒눇≮ +B; 󠄁\u035F⾶。₇︒눇<\u0338; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7︒눇≮ +B; 󠄁\u035F飛。7。눇≮; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7.눇≮ +B; 󠄁\u035F飛。7。눇<\u0338; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7.눇≮ +B; xn--9ua0567e.7.xn--gdh6767c; [V5 V6]; [V5 V6] # ͟飛.7.눇≮ +B; xn--9ua0567e.xn--7-ngou006d1ttc; [V5 V6]; [V5 V6] # ͟飛.7︒눇≮ +T; \u200C\uFE09𐹴\u200D.\u200C⿃; [B1 C1 C2]; [B1] # 𐹴.鳥 +N; \u200C\uFE09𐹴\u200D.\u200C⿃; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 +T; \u200C\uFE09𐹴\u200D.\u200C鳥; [B1 C1 C2]; [B1] # 𐹴.鳥 +N; \u200C\uFE09𐹴\u200D.\u200C鳥; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 +B; xn--so0d.xn--6x6a; [B1]; [B1] +B; xn--0ugc6024p.xn--0ug1920c; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 +T; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 P1 V6] # 🍮.𐦁𝨝 +N; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🍮.𐦁𝨝 +T; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 P1 V6] # 🍮.𐦁𝨝 +N; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🍮.𐦁𝨝 +B; xn--lj8h.xn--ln9ci476aqmr2g; [B1 V6]; [B1 V6] +B; xn--lj8h.xn--1ug6603gr1pfwq37h; [B1 C2 V6]; [B1 C2 V6] # 🍮.𐦁𝨝 +T; \u067D\u0943.𞤓\u200D; [B3 C2]; xn--2ib43l.xn--te6h # ٽृ.𞤵 +N; \u067D\u0943.𞤓\u200D; [B3 C2]; [B3 C2] # ٽृ.𞤵 +T; \u067D\u0943.𞤵\u200D; [B3 C2]; xn--2ib43l.xn--te6h # ٽृ.𞤵 +N; \u067D\u0943.𞤵\u200D; [B3 C2]; [B3 C2] # ٽृ.𞤵 +B; xn--2ib43l.xn--te6h; \u067D\u0943.𞤵; xn--2ib43l.xn--te6h # ٽृ.𞤵 +B; \u067D\u0943.𞤵; ; xn--2ib43l.xn--te6h # ٽृ.𞤵 +B; \u067D\u0943.𞤓; \u067D\u0943.𞤵; xn--2ib43l.xn--te6h # ٽृ.𞤵 +B; xn--2ib43l.xn--1ugy711p; [B3 C2]; [B3 C2] # ٽृ.𞤵 +B; \u0664\u0A4D-.󥜽\u1039񦦐; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٤੍-.္ +B; \u0664\u0A4D-.󥜽\u1039񦦐; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٤੍-.္ +B; 
xn----gqc711a.xn--9jd88234f3qm0b; [B1 V3 V6]; [B1 V3 V6] # ٤੍-.္ +T; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +N; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +T; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +N; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +T; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +N; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +T; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +N; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +B; xn--4--e4j7831r.xn--gdh8754cz40c; [B1 V5 V6]; [B1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +B; xn--4--e4j7831r.xn--0ugy6gjy5sl3ud; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +T; \u200C。\uFFA0\u0F84\u0F96; [C1 P1 V6]; [P1 V6 A4_2] # .྄ྖ +N; \u200C。\uFFA0\u0F84\u0F96; [C1 P1 V6]; [C1 P1 V6] # .྄ྖ +T; \u200C。\u1160\u0F84\u0F96; [C1 P1 V6]; [P1 V6 A4_2] # .྄ྖ +N; \u200C。\u1160\u0F84\u0F96; [C1 P1 V6]; [C1 P1 V6] # .྄ྖ +B; .xn--3ed0b20h; [V6 A4_2]; [V6 A4_2] # .྄ྖ +B; xn--0ug.xn--3ed0b20h; [C1 V6]; [C1 V6] # .྄ྖ +B; .xn--3ed0by082k; [V6 A4_2]; [V6 A4_2] # .྄ྖ +B; xn--0ug.xn--3ed0by082k; [C1 V6]; [C1 V6] # .྄ྖ +T; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 +N; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 +T; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 +N; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 +T; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 +N; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 +T; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 +N; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 +B; xn--hdh84488f.xn--xy7cw2886b; [V6]; [V6] +B; xn--hdh84488f.xn--1ug8099fbjp4e; [C2 V6]; [C2 V6] # ≯.𐅼 +T; \u0641ß𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +N; \u0641ß𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +T; \u0641ß𐰯。7𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +N; \u0641ß𐰯。7𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +B; \u0641SS𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; \u0641ss𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; \u0641Ss𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; xn--ss-jvd2339x.xn--7-mm5i; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; xn--zca96ys96y.xn--7-mm5i; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +B; \u0641SS𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; \u0641ss𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; \u0641Ss𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +T; ß\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ßެާࢱ.𐭁𐹲 +N; ß\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ßެާࢱ.𐭁𐹲 +B; SS\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 +B; ss\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 +B; Ss\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 +B; xn--ss-9qet02k.xn--e09co8cr9861c; [B2 B5 B6 V6]; [B2 B5 B6 V6] # ssެާࢱ.𐭁𐹲 +B; xn--zca685aoa95h.xn--e09co8cr9861c; [B2 B5 B6 V6]; [B2 B5 B6 V6] # ßެާࢱ.𐭁𐹲 +B; -。󠉗⒌𞯛; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -。󠉗5.𞯛; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -.xn--5-zz21m.xn--6x6h; [B1 V3 V6]; [B1 V3 V6] +B; -.xn--xsh6367n1bi3e; [B1 V3 V6]; [B1 V3 V6] +T; 𼎏ς.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +N; 𼎏ς.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +T; 𼎏ς.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +N; 𼎏ς.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +T; 𼎏ς.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +N; 𼎏ς.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +T; 𼎏ς.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +N; 𼎏ς.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +B; 𼎏Σ.-<\u0338\u062E\u062C; [B1 P1 
V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏Σ.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏σ.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏σ.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; xn--4xa92520c.xn----9mcf1400a; [B1 V3 V6]; [B1 V3 V6] # σ.-≮خج +B; xn--3xa13520c.xn----9mcf1400a; [B1 V3 V6]; [B1 V3 V6] # ς.-≮خج +B; 𼎏Σ.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏Σ.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏σ.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏σ.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; ꡗ\u08B8\u0719.񔤔󠛙\u0C4D\uFC3E; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ꡗࢸܙ.్كي +B; ꡗ\u08B8\u0719.񔤔󠛙\u0C4D\u0643\u064A; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ꡗࢸܙ.్كي +B; xn--jnb34fs003a.xn--fhbo927bk128mpi24d; [B5 B6 V6]; [B5 B6 V6] # ꡗࢸܙ.్كي +B; 𐠰\u08B7𞤌𐫭。𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +B; 𐠰\u08B7𞤮𐫭。𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +B; xn--dzb5191kezbrw47a.xn--p4e3841jz9tf; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +B; 𐠰\u08B7𞤮𐫭.𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +B; 𐠰\u08B7𞤌𐫭.𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +T; ₂㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +N; ₂㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +T; ₂㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +N; ₂㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +T; 2㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +N; 2㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +T; 2㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +N; 2㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +B; xn--2---u58b.xn--jlb8024k14g; [B1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +B; xn--2---u58b.xn--jlb820ku99nbgj; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +B; -𘊻.ᡮ\u062D-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- +B; -𘊻.ᡮ\u062D-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- +B; xn----bp5n.xn----bnc231l; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- +T; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ß.ᢣ𐹭ؿ +N; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ +T; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ß.ᢣ𐹭ؿ +N; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ +T; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +T; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +T; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +B; -ss.xn--bhb925glx3p; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +B; xn---ss-8m0a.xn--bhb925glx3p; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +B; xn----qfa550v.xn--bhb925glx3p; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ +T; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +T; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +T; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; ꧐ӏ\u1BAA\u08F6.눵; ; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; xn--s5a04sn4u297k.xn--2e1b; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; 
xn--d5a07sn4u297k.xn--2e1b; [V6]; [V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; \uA8EA。𖄿𑆾󠇗; [P1 V5 V6]; [P1 V5 V6] # ꣪.𑆾 +B; \uA8EA。𖄿𑆾󠇗; [P1 V5 V6]; [P1 V5 V6] # ꣪.𑆾 +B; xn--3g9a.xn--ud1dz07k; [V5 V6]; [V5 V6] # ꣪.𑆾 +B; 󇓓𑚳。񐷿≯⾇; [P1 V6]; [P1 V6] +B; 󇓓𑚳。񐷿>\u0338⾇; [P1 V6]; [P1 V6] +B; 󇓓𑚳。񐷿≯舛; [P1 V6]; [P1 V6] +B; 󇓓𑚳。񐷿>\u0338舛; [P1 V6]; [P1 V6] +B; xn--3e2d79770c.xn--hdh0088abyy1c; [V6]; [V6] +T; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; xn--9hb7344k. # 𐫇١. +N; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. +T; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; xn--9hb7344k. # 𐫇١. +N; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. +B; xn--9hb7344k.; 𐫇\u0661.; xn--9hb7344k. # 𐫇١. +B; 𐫇\u0661.; ; xn--9hb7344k. # 𐫇١. +B; xn--9hb652kv99n.xn--0ugb; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. +T; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 +N; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 +T; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 +N; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 +T; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 +N; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 +T; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 +N; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 +B; xn--bbf561cf95e57y3e.xn--hdh0834o7mj6b; [V6]; [V6] +B; xn--bbf561cf95e57y3e.xn--0ugz6gc910ejro8c; [C1 V6]; [C1 V6] # 砪≯ᢑ.≯𝩚 +B; Ⴥ.𑄳㊸; [P1 V5 V6]; [P1 V5 V6] +B; Ⴥ.𑄳43; [P1 V5 V6]; [P1 V5 V6] +B; ⴥ.𑄳43; [V5]; [V5] +B; xn--tlj.xn--43-274o; [V5]; [V5] +B; xn--9nd.xn--43-274o; [V5 V6]; [V5 V6] +B; ⴥ.𑄳㊸; [V5]; [V5] +B; 𝟎\u0663。Ⴒᡇ\u08F2𐹠; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 0٣.Ⴒᡇࣲ𐹠 +B; 0\u0663。Ⴒᡇ\u08F2𐹠; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 0٣.Ⴒᡇࣲ𐹠 +B; 0\u0663。ⴒᡇ\u08F2𐹠; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 +B; xn--0-fqc.xn--10b369eivp359r; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 +B; xn--0-fqc.xn--10b180bnwgfy0z; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 0٣.Ⴒᡇࣲ𐹠 +B; 𝟎\u0663。ⴒᡇ\u08F2𐹠; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 +B; 񗪨󠄉\uFFA0\u0FB7.񸞰\uA953; [P1 V6]; [P1 V6] # ྷ.꥓ +B; 񗪨󠄉\u1160\u0FB7.񸞰\uA953; [P1 V6]; [P1 V6] # ྷ.꥓ +B; xn--kgd36f9z57y.xn--3j9au7544a; [V6]; [V6] # ྷ.꥓ +B; xn--kgd7493jee34a.xn--3j9au7544a; [V6]; [V6] # ྷ.꥓ +T; \u0618.۳\u200C\uA953; [C1 V5]; [V5] # ؘ.۳꥓ +N; \u0618.۳\u200C\uA953; [C1 V5]; [C1 V5] # ؘ.۳꥓ +B; xn--6fb.xn--gmb0524f; [V5]; [V5] # ؘ.۳꥓ +B; xn--6fb.xn--gmb469jjf1h; [C1 V5]; [C1 V5] # ؘ.۳꥓ +B; ᡌ.︒ᢑ; [P1 V6]; [P1 V6] +B; ᡌ.。ᢑ; [A4_2]; [A4_2] +B; xn--c8e..xn--bbf; [A4_2]; [A4_2] +B; xn--c8e.xn--bbf9168i; [V6]; [V6] +B; 𑋪\u1073。𞽧; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑋪ၳ. +B; 𑋪\u1073。𞽧; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑋪ၳ. +B; xn--xld7443k.xn--4o7h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𑋪ၳ. 
+B; 𞷏。ᠢ򓘆; [P1 V6]; [P1 V6] +B; xn--hd7h.xn--46e66060j; [V6]; [V6] +T; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑄳㴼.𐹡⃫ +N; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑄳㴼.𐹡⃫ +T; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑄳㴼.𐹡⃫ +N; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑄳㴼.𐹡⃫ +B; xn--iym9428c.xn--e1g3464g08p3b; [B1 V5 V6]; [B1 V5 V6] # 𑄳㴼.𐹡⃫ +B; xn--iym9428c.xn--0ug46a7218cllv0c; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𑄳㴼.𐹡⃫ +B; 񠻟𐹳𑈯。\u031D; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𐹳𑈯.̝ +B; 񠻟𐹳𑈯。\u031D; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𐹳𑈯.̝ +B; xn--ro0dw7dey96m.xn--eta; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𐹳𑈯.̝ +B; ᢊ뾜󠱴𑚶。\u089D𐹥; [P1 V6]; [P1 V6] # ᢊ뾜𑚶.𐹥 +B; ᢊ뾜󠱴𑚶。\u089D𐹥; [P1 V6]; [P1 V6] # ᢊ뾜𑚶.𐹥 +B; xn--39e4566fjv8bwmt6n.xn--myb6415k; [V6]; [V6] # ᢊ뾜𑚶.𐹥 +T; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 +N; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 +T; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 +N; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 +T; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 +N; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 +T; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 +N; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 +B; xn--1ch6704g.xn--m97cw2999c; [B1 V6]; [B1 V6] +B; xn--1ch6704g.xn--0ug3840g51u4g; [B1 C1 V6]; [B1 C1 V6] # 𐹥≠.𐋲 +T; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B5 B6 P1 V5 V6] # ्.꥓ +N; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ्.꥓ +T; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B5 B6 P1 V5 V6] # ्.꥓ +N; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ्.꥓ +B; xn--n3b542bb085j.xn--3j9al95p; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ्.꥓ +B; xn--n3b542bb085j.xn--1ug6815co9wc; [B1 C2 V6]; [B1 C2 V6] # ्.꥓ +B; 򌋔󠆎󠆗𑲕。≮; [P1 V6]; [P1 V6] +B; 򌋔󠆎󠆗𑲕。<\u0338; [P1 V6]; [P1 V6] +B; xn--4m3dv4354a.xn--gdh; [V6]; [V6] +B; 󠆦.\u08E3暀≠; [P1 V5 V6 A4_2]; [P1 V5 V6 A4_2] # .ࣣ暀≠ +B; 󠆦.\u08E3暀=\u0338; [P1 V5 V6 A4_2]; [P1 V5 V6 A4_2] # .ࣣ暀≠ +B; .xn--m0b461k3g2c; [V5 V6 A4_2]; [V5 V6 A4_2] # .ࣣ暀≠ +B; 𐡤\uABED。\uFD30򜖅\u1DF0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐡤꯭.شمᷰ +B; 𐡤\uABED。\u0634\u0645򜖅\u1DF0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐡤꯭.شمᷰ +B; xn--429ak76o.xn--zgb8a701kox37t; [B2 B3 V6]; [B2 B3 V6] # 𐡤꯭.شمᷰ +T; 𝉃\u200D⒈。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝉃⒈.Ⴌ +N; 𝉃\u200D⒈。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # 𝉃⒈.Ⴌ +T; 𝉃\u200D1.。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 P1 V5 V6 A4_2] # 𝉃1..Ⴌ +N; 𝉃\u200D1.。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 C2 P1 V5 V6 A4_2] # 𝉃1..Ⴌ +T; 𝉃\u200D1.。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 P1 V5 V6 A4_2] # 𝉃1..ⴌ +N; 𝉃\u200D1.。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 C2 P1 V5 V6 A4_2] # 𝉃1..ⴌ +B; xn--1-px8q..xn--3kj4524l; [B1 B5 B6 V5 V6 A4_2]; [B1 B5 B6 V5 V6 A4_2] +B; xn--1-tgn9827q..xn--3kj4524l; [B1 B5 B6 C2 V5 V6 A4_2]; [B1 B5 B6 C2 V5 V6 A4_2] # 𝉃1..ⴌ +B; xn--1-px8q..xn--knd8464v; [B1 B5 B6 V5 V6 A4_2]; [B1 B5 B6 V5 V6 A4_2] +B; xn--1-tgn9827q..xn--knd8464v; [B1 B5 B6 C2 V5 V6 A4_2]; [B1 B5 B6 C2 V5 V6 A4_2] # 𝉃1..Ⴌ +T; 𝉃\u200D⒈。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝉃⒈.ⴌ +N; 𝉃\u200D⒈。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # 𝉃⒈.ⴌ +B; xn--tshz828m.xn--3kj4524l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--1ug68oq348b.xn--3kj4524l; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # 𝉃⒈.ⴌ +B; xn--tshz828m.xn--knd8464v; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--1ug68oq348b.xn--knd8464v; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # 𝉃⒈.Ⴌ +T; 󠣙\u0A4D𱫘𞤸.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 
V6] # ੍𞤸.ς +N; 󠣙\u0A4D𱫘𞤸.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς +B; 󠣙\u0A4D𱫘𞤖.Σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ +B; 󠣙\u0A4D𱫘𞤸.σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ +B; 󠣙\u0A4D𱫘𞤖.σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ +B; xn--ybc0236vjvxgt5q0g.xn--4xa82737giye6b; [B1 V6]; [B1 V6] # ੍𞤸.σ +T; 󠣙\u0A4D𱫘𞤖.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς +N; 󠣙\u0A4D𱫘𞤖.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς +B; xn--ybc0236vjvxgt5q0g.xn--3xa03737giye6b; [B1 V6]; [B1 V6] # ੍𞤸.ς +B; 󠣙\u0A4D𱫘𞤸.Σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ +T; \u07D3。\u200C𐫀򞭱; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ߓ.𐫀 +N; \u07D3。\u200C𐫀򞭱; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ߓ.𐫀 +B; xn--usb.xn--pw9ci1099a; [B2 B3 V6]; [B2 B3 V6] # ߓ.𐫀 +B; xn--usb.xn--0ug9553gm3v5d; [B1 C1 V6]; [B1 C1 V6] # ߓ.𐫀 +B; \u1C2E𞀝.\u05A6ꡟ𞤕󠆖; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 +B; \u1C2E𞀝.\u05A6ꡟ𞤷󠆖; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 +B; xn--q1f4493q.xn--xcb8244fifvj; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 +T; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [P1 V6] # 䂹𐋦. +N; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [C2 P1 V6] # 䂹𐋦. +T; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [P1 V6] # 䂹𐋦. +N; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [C2 P1 V6] # 䂹𐋦. +B; xn--0on3543c5981i.; [V6]; [V6] +B; xn--0on3543c5981i.xn--1ug; [C2 V6]; [C2 V6] # 䂹𐋦. +T; \uA9C0\u200C𐹲\u200C。\u0767🄉; [B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ꧀𐹲.ݧ🄉 +N; \uA9C0\u200C𐹲\u200C。\u0767🄉; [B5 B6 C1 P1 V5 V6]; [B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ🄉 +T; \uA9C0\u200C𐹲\u200C。\u07678,; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 P1 V5 V6] # ꧀𐹲.ݧ8, +N; \uA9C0\u200C𐹲\u200C。\u07678,; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ8, +B; xn--7m9an32q.xn--8,-qle; [B3 B5 B6 P1 V5 V6]; [B3 B5 B6 P1 V5 V6] # ꧀𐹲.ݧ8, +B; xn--0uga8686hdgvd.xn--8,-qle; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ8, +B; xn--7m9an32q.xn--rpb6081w; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ꧀𐹲.ݧ🄉 +B; xn--0uga8686hdgvd.xn--rpb6081w; [B5 B6 C1 V5 V6]; [B5 B6 C1 V5 V6] # ꧀𐹲.ݧ🄉 +B; ︒。Ⴃ≯; [P1 V6]; [P1 V6] +B; ︒。Ⴃ>\u0338; [P1 V6]; [P1 V6] +B; 。。Ⴃ≯; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。。Ⴃ>\u0338; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。。ⴃ>\u0338; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。。ⴃ≯; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ..xn--hdh782b; [V6 A4_2]; [V6 A4_2] +B; ..xn--bnd622g; [V6 A4_2]; [V6 A4_2] +B; ︒。ⴃ>\u0338; [P1 V6]; [P1 V6] +B; ︒。ⴃ≯; [P1 V6]; [P1 V6] +B; xn--y86c.xn--hdh782b; [V6]; [V6] +B; xn--y86c.xn--bnd622g; [V6]; [V6] +T; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹮. +N; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹮. +T; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹮. +N; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹮. +B; xn--mo0d.xn--wy46e; [B1 V6]; [B1 V6] +B; xn--mo0d.xn--1ug18431l; [B1 C2 V6]; [B1 C2 V6] # 𐹮. 
+T; Ⴞ𐹨。︒\u077D\u200DႯ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 P1 V6] # Ⴞ𐹨.︒ݽႯ +N; Ⴞ𐹨。︒\u077D\u200DႯ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # Ⴞ𐹨.︒ݽႯ +T; Ⴞ𐹨。。\u077D\u200DႯ; [B2 B3 B5 B6 C2 P1 V6 A4_2]; [B2 B3 B5 B6 P1 V6 A4_2] # Ⴞ𐹨..ݽႯ +N; Ⴞ𐹨。。\u077D\u200DႯ; [B2 B3 B5 B6 C2 P1 V6 A4_2]; [B2 B3 B5 B6 C2 P1 V6 A4_2] # Ⴞ𐹨..ݽႯ +T; ⴞ𐹨。。\u077D\u200Dⴏ; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 A4_2] # ⴞ𐹨..ݽⴏ +N; ⴞ𐹨。。\u077D\u200Dⴏ; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 C2 A4_2] # ⴞ𐹨..ݽⴏ +B; xn--mlju223e..xn--eqb053q; [B2 B3 B5 B6 A4_2]; [B2 B3 B5 B6 A4_2] # ⴞ𐹨..ݽⴏ +B; xn--mlju223e..xn--eqb096jpgj; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 C2 A4_2] # ⴞ𐹨..ݽⴏ +B; xn--2nd0990k..xn--eqb228b; [B2 B3 B5 B6 V6 A4_2]; [B2 B3 B5 B6 V6 A4_2] # Ⴞ𐹨..ݽႯ +B; xn--2nd0990k..xn--eqb228bgzm; [B2 B3 B5 B6 C2 V6 A4_2]; [B2 B3 B5 B6 C2 V6 A4_2] # Ⴞ𐹨..ݽႯ +T; ⴞ𐹨。︒\u077D\u200Dⴏ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 P1 V6] # ⴞ𐹨.︒ݽⴏ +N; ⴞ𐹨。︒\u077D\u200Dⴏ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # ⴞ𐹨.︒ݽⴏ +B; xn--mlju223e.xn--eqb053qjk7l; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ⴞ𐹨.︒ݽⴏ +B; xn--mlju223e.xn--eqb096jpgj9y7r; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # ⴞ𐹨.︒ݽⴏ +B; xn--2nd0990k.xn--eqb228b583r; [B1 B5 B6 V6]; [B1 B5 B6 V6] # Ⴞ𐹨.︒ݽႯ +B; xn--2nd0990k.xn--eqb228bgzmvp0t; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # Ⴞ𐹨.︒ݽႯ +T; \u200CႦ𝟹。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # Ⴆ3.-⃒-ߑ +N; \u200CႦ𝟹。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴆ3.-⃒-ߑ +T; \u200CႦ3。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # Ⴆ3.-⃒-ߑ +N; \u200CႦ3。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴆ3.-⃒-ߑ +T; \u200Cⴆ3。-\u20D2-\u07D1; [B1 C1 V3]; [B1 V3] # ⴆ3.-⃒-ߑ +N; \u200Cⴆ3。-\u20D2-\u07D1; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ +B; xn--3-lvs.xn-----vue617w; [B1 V3]; [B1 V3] # ⴆ3.-⃒-ߑ +B; xn--3-rgnv99c.xn-----vue617w; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ +B; xn--3-i0g.xn-----vue617w; [B1 V3 V6]; [B1 V3 V6] # Ⴆ3.-⃒-ߑ +B; xn--3-i0g939i.xn-----vue617w; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴆ3.-⃒-ߑ +T; \u200Cⴆ𝟹。-\u20D2-\u07D1; [B1 C1 V3]; [B1 V3] # ⴆ3.-⃒-ߑ +N; \u200Cⴆ𝟹。-\u20D2-\u07D1; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ +B; 箃Ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] +B; 箃Ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] +B; 箃Ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] +B; 箃Ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] +B; 箃ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] +B; 箃ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] +B; xn----4wsr321ay823p.xn----tfot873s; [V6]; [V6] +B; xn----11g3013fy8x5m.xn----tfot873s; [V6]; [V6] +B; 箃ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] +B; 箃ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] +B; \u07E5.\u06B5; ; xn--dtb.xn--okb # ߥ.ڵ +B; xn--dtb.xn--okb; \u07E5.\u06B5; xn--dtb.xn--okb # ߥ.ڵ +T; \u200C\u200D.𞤿; [B1 C1 C2]; [A4_2] # .𞤿 +N; \u200C\u200D.𞤿; [B1 C1 C2]; [B1 C1 C2] # .𞤿 +T; \u200C\u200D.𞤝; [B1 C1 C2]; [A4_2] # .𞤿 +N; \u200C\u200D.𞤝; [B1 C1 C2]; [B1 C1 C2] # .𞤿 +B; .xn--3e6h; [A4_2]; [A4_2] +B; xn--0ugc.xn--3e6h; [B1 C1 C2]; [B1 C1 C2] # .𞤿 +B; xn--3e6h; 𞤿; xn--3e6h +B; 𞤿; ; xn--3e6h +B; 𞤝; 𞤿; xn--3e6h +T; 🜑𐹧\u0639.ς𑍍蜹; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 +N; 🜑𐹧\u0639.ς𑍍蜹; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 +B; 🜑𐹧\u0639.Σ𑍍蜹; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 +B; 🜑𐹧\u0639.σ𑍍蜹; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 +B; xn--4gb3736kk4zf.xn--4xa2248dy27d; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 +B; xn--4gb3736kk4zf.xn--3xa4248dy27d; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 +B; 򫠐ス􆟤\u0669.󚃟; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ス٩. +B; 򫠐ス􆟤\u0669.󚃟; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ス٩. +B; xn--iib777sp230oo708a.xn--7824e; [B5 B6 V6]; [B5 B6 V6] # ス٩. 
+B; 𝪣򕡝.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; 𝪣򕡝.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; 𝪣򕡝.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; 𝪣򕡝.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; xn--8c3hu7971a.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; xn--8c3hu7971a.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; XN--8C3HU7971A.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; XN--8C3HU7971A.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; Xn--8C3hu7971a.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; Xn--8C3hu7971a.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +T; \u0660򪓵\u200C。\u0757; [B1 C1 P1 V6]; [B1 P1 V6] # ٠.ݗ +N; \u0660򪓵\u200C。\u0757; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ٠.ݗ +B; xn--8hb82030l.xn--bpb; [B1 V6]; [B1 V6] # ٠.ݗ +B; xn--8hb852ke991q.xn--bpb; [B1 C1 V6]; [B1 C1 V6] # ٠.ݗ +T; \u103A\u200D\u200C。-\u200C; [C1 V3 V5]; [V3 V5] # ်.- +N; \u103A\u200D\u200C。-\u200C; [C1 V3 V5]; [C1 V3 V5] # ်.- +B; xn--bkd.-; [V3 V5]; [V3 V5] # ်.- +B; xn--bkd412fca.xn----sgn; [C1 V3 V5]; [C1 V3 V5] # ်.- +B; ︒。\u1B44ᡉ; [P1 V5 V6]; [P1 V5 V6] # ︒.᭄ᡉ +B; 。。\u1B44ᡉ; [V5 A4_2]; [V5 A4_2] # ..᭄ᡉ +B; ..xn--87e93m; [V5 A4_2]; [V5 A4_2] # ..᭄ᡉ +B; xn--y86c.xn--87e93m; [V5 V6]; [V5 V6] # ︒.᭄ᡉ +T; \u0758ß。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +N; \u0758ß。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +T; \u0758ß。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +N; \u0758ß。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +B; \u0758SS。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u0758ss。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u0758Ss。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; xn--ss-gke.xn--2-b5c641gfmf; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; xn--zca724a.xn--2-b5c641gfmf; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +B; \u0758SS。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u0758ss。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u0758Ss。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u07C3𞶇ᚲ.\u0902\u0353𝟚\u09CD; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߃ᚲ.ं͓2্ +B; \u07C3𞶇ᚲ.\u0902\u03532\u09CD; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߃ᚲ.ं͓2্ +B; xn--esb067enh07a.xn--2-lgb874bjxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߃ᚲ.ं͓2্ +T; -\u1BAB︒\u200D.񒶈񥹓; [C2 P1 V3 V6]; [P1 V3 V6] # -᮫︒. +N; -\u1BAB︒\u200D.񒶈񥹓; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -᮫︒. +T; -\u1BAB。\u200D.񒶈񥹓; [C2 P1 V3 V6]; [P1 V3 V6 A4_2] # -᮫.. +N; -\u1BAB。\u200D.񒶈񥹓; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -᮫.. +B; xn----qml..xn--x50zy803a; [V3 V6 A4_2]; [V3 V6 A4_2] # -᮫.. +B; xn----qml.xn--1ug.xn--x50zy803a; [C2 V3 V6]; [C2 V3 V6] # -᮫.. +B; xn----qml1407i.xn--x50zy803a; [V3 V6]; [V3 V6] # -᮫︒. +B; xn----qmlv7tw180a.xn--x50zy803a; [C2 V3 V6]; [C2 V3 V6] # -᮫︒. 
+B; 󠦮.≯𞀆; [P1 V6]; [P1 V6] +B; 󠦮.>\u0338𞀆; [P1 V6]; [P1 V6] +B; xn--t546e.xn--hdh5166o; [V6]; [V6] +B; -𑄳󠊗𐹩。𞮱; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn----p26i72em2894c.xn--zw6h; [B1 V3 V6]; [B1 V3 V6] +B; \u06B9.ᡳ\u115F; [P1 V6]; [P1 V6] # ڹ.ᡳ +B; \u06B9.ᡳ\u115F; [P1 V6]; [P1 V6] # ڹ.ᡳ +B; xn--skb.xn--osd737a; [V6]; [V6] # ڹ.ᡳ +B; 㨛𘱎.︒𝟕\u0D01; [P1 V6]; [P1 V6] # 㨛.︒7ഁ +B; 㨛𘱎.。7\u0D01; [P1 V6 A4_2]; [P1 V6 A4_2] # 㨛..7ഁ +B; xn--mbm8237g..xn--7-7hf; [V6 A4_2]; [V6 A4_2] # 㨛..7ഁ +B; xn--mbm8237g.xn--7-7hf1526p; [V6]; [V6] # 㨛.︒7ഁ +B; \u06DD𻱧-。𞷁\u2064𞤣≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤣<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤣≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤣<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤁<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤁≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; xn----dxc06304e.xn--gdh5020pk5c; [B1 B3 V3 V6]; [B1 B3 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤁<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤁≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +T; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ß꫶ᢥ.⊶ჁႶ +N; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ß꫶ᢥ.⊶ჁႶ +T; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ß꫶ᢥ.⊶ჁႶ +N; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ß꫶ᢥ.⊶ჁႶ +T; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ß꫶ᢥ.⊶ⴡⴖ +N; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ +T; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ +N; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶ჁႶ +T; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ss꫶ᢥ.⊶ⴡⴖ +N; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ +T; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +N; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +B; xn--ss-4epx629f.xn--5nd703gyrh; [V6]; [V6] # ss꫶ᢥ.⊶Ⴡⴖ +B; xn--ss-4ep585bkm5p.xn--5nd703gyrh; [C1 V6]; [C1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +B; xn--ss-4epx629f.xn--ifh802b6a; ss\uAAF6ᢥ.⊶ⴡⴖ; xn--ss-4epx629f.xn--ifh802b6a; NV8 # ss꫶ᢥ.⊶ⴡⴖ +B; ss\uAAF6ᢥ.⊶ⴡⴖ; ; xn--ss-4epx629f.xn--ifh802b6a; NV8 # ss꫶ᢥ.⊶ⴡⴖ +B; SS\uAAF6ᢥ.⊶ჁႶ; [P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ +B; Ss\uAAF6ᢥ.⊶Ⴡⴖ; [P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +B; xn--ss-4epx629f.xn--undv409k; [V6]; [V6] # ss꫶ᢥ.⊶ჁႶ +B; xn--ss-4ep585bkm5p.xn--ifh802b6a; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ +B; xn--ss-4ep585bkm5p.xn--undv409k; [C1 V6]; [C1 V6] # ss꫶ᢥ.⊶ჁႶ +B; xn--zca682johfi89m.xn--ifh802b6a; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ +B; xn--zca682johfi89m.xn--undv409k; [C1 V6]; [C1 V6] # ß꫶ᢥ.⊶ჁႶ +T; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ß꫶ᢥ.⊶ⴡⴖ +N; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ +T; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ +N; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶ჁႶ +T; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ss꫶ᢥ.⊶ⴡⴖ +N; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ +T; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +N; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +T; \u200D。ς󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .ς +N; \u200D。ς󠁉; [C2 P1 V6]; [C2 P1 V6] # .ς +T; \u200D。Σ󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .σ +N; \u200D。Σ󠁉; [C2 P1 V6]; [C2 P1 V6] # .σ +T; \u200D。σ󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .σ +N; \u200D。σ󠁉; [C2 P1 V6]; [C2 P1 V6] # .σ +B; .xn--4xa24344p; [V6 A4_2]; [V6 A4_2] +B; xn--1ug.xn--4xa24344p; [C2 V6]; [C2 V6] # .σ +B; xn--1ug.xn--3xa44344p; [C2 V6]; [C2 V6] # .ς +T; 𞵑ß.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ß.ݑ𞤽- +N; 𞵑ß.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ß.ݑ𞤽- +T; 
𞵑ß.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ß.ݑ𞤽- +N; 𞵑ß.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ß.ݑ𞤽- +T; 𞵑SS.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑SS.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +T; 𞵑ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +T; 𞵑Ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑Ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +B; xn--ss-2722a.xn----z3c03218a; [B2 B3 V3 V6]; [B2 B3 V3 V6] # ss.ݑ𞤽- +B; xn--ss-2722a.xn----z3c011q9513b; [B2 B3 C2 V3 V6]; [B2 B3 C2 V3 V6] # ss.ݑ𞤽- +B; xn--zca5423w.xn----z3c011q9513b; [B2 B3 C2 V3 V6]; [B2 B3 C2 V3 V6] # ß.ݑ𞤽- +T; 𞵑ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +T; 𞵑Ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑Ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +T; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +N; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +T; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +N; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +T; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +N; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +B; xn--qb2ds317a.xn----k26iq1483f; [B1 V3 V5 V6]; [B1 V3 V5 V6] +B; xn--1ugz808gdimf.xn----k26iq1483f; [B1 C2 V3 V5 V6]; [B1 C2 V3 V5 V6] # 𑘽𞤧.𐹧- +T; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +N; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +B; ⒒򨘙򳳠𑓀.-󞡊; [P1 V3 V6]; [P1 V3 V6] +B; 11.򨘙򳳠𑓀.-󞡊; [P1 V3 V6]; [P1 V3 V6] +B; 11.xn--uz1d59632bxujd.xn----x310m; [V3 V6]; [V3 V6] +B; xn--3shy698frsu9dt1me.xn----x310m; [V3 V6]; [V3 V6] +T; -。\u200D; [C2 V3]; [V3] # -. +N; -。\u200D; [C2 V3]; [C2 V3] # -. +T; -。\u200D; [C2 V3]; [V3] # -. +N; -。\u200D; [C2 V3]; [C2 V3] # -. +B; -.; [V3]; [V3] +B; -.xn--1ug; [C2 V3]; [C2 V3] # -. 
+T; ≮ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; ≮ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +T; <\u0338ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; <\u0338ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +T; ≮ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; ≮ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +T; <\u0338ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; <\u0338ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +B; <\u0338ᡬ.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ≮ᡬ.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ≮ᡬ.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; <\u0338ᡬ.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; xn--88e732c.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; XN--88E732C.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +T; xn--88e732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; xn--88e732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +T; Xn--88E732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; Xn--88E732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +B; Xn--88E732c.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; <\u0338ᡬ.Σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ≮ᡬ.Σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ≮ᡬ.σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; <\u0338ᡬ.σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ቬ򔠼񁗶。𐨬𝟠; [P1 V6]; [P1 V6] +B; ቬ򔠼񁗶。𐨬8; [P1 V6]; [P1 V6] +B; xn--d0d41273c887z.xn--8-ob5i; [V6]; [V6] +B; 𐱲。蔫\u0766; [B5 B6 P1 V6]; [B5 B6 P1 V6] # .蔫ݦ +B; xn--389c.xn--qpb7055d; [B5 B6 V6]; [B5 B6 V6] # .蔫ݦ +B; 򒲧₃。ꡚ𛇑󠄳\u0647; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 3.ꡚ𛇑ه +B; 򒲧3。ꡚ𛇑󠄳\u0647; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 3.ꡚ𛇑ه +B; xn--3-ep59g.xn--jhb5904fcp0h; [B5 B6 V6]; [B5 B6 V6] # 3.ꡚ𛇑ه +T; 蓸\u0642≠.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß +N; 蓸\u0642≠.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß +T; 蓸\u0642=\u0338.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß +N; 蓸\u0642=\u0338.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß +B; 蓸\u0642=\u0338.SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642≠.SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642≠.ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642=\u0338.ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642=\u0338.Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642≠.Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; xn--ehb015lnt1e.ss; [B5 B6 V6]; [B5 B6 V6] # 蓸ق≠.ss +B; xn--ehb015lnt1e.xn--zca; [B5 B6 V6]; [B5 B6 V6] # 蓸ق≠.ß +T; \u084E\u067A\u0DD3⒊.𐹹𞱩󠃪\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # ࡎٺී⒊.𐹹 +N; \u084E\u067A\u0DD3⒊.𐹹𞱩󠃪\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࡎٺී⒊.𐹹 +T; \u084E\u067A\u0DD33..𐹹𞱩󠃪\u200C; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡎٺී3..𐹹 +N; \u084E\u067A\u0DD33..𐹹𞱩󠃪\u200C; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ࡎٺී3..𐹹 +B; xn--3-prc71ls9j..xn--xo0dw109an237f; [B1 V6 A4_2]; [B1 V6 A4_2] # ࡎٺී3..𐹹 +B; xn--3-prc71ls9j..xn--0ug3205g7eyf3c96h; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ࡎٺී3..𐹹 +B; xn--zib94gfziuq1a.xn--xo0dw109an237f; [B1 V6]; [B1 V6] # ࡎٺී⒊.𐹹 +B; xn--zib94gfziuq1a.xn--0ug3205g7eyf3c96h; [B1 C1 V6]; [B1 C1 V6] # ࡎٺී⒊.𐹹 +T; ς\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [P1 V3 V6] # ς-.Ⴣ𦟙 +N; ς\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ς-.Ⴣ𦟙 +T; ς\u200D-.ⴣ𦟙; [C2 V3]; [V3] # ς-.ⴣ𦟙 +N; ς\u200D-.ⴣ𦟙; [C2 V3]; [C2 V3] # ς-.ⴣ𦟙 +T; Σ\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [P1 V3 V6] # σ-.Ⴣ𦟙 +N; Σ\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [C2 P1 V3 V6] # σ-.Ⴣ𦟙 +T; σ\u200D-.ⴣ𦟙; [C2 V3]; [V3] # σ-.ⴣ𦟙 +N; σ\u200D-.ⴣ𦟙; [C2 V3]; [C2 V3] # σ-.ⴣ𦟙 +B; xn----zmb.xn--rlj2573p; [V3]; [V3] +B; xn----zmb048s.xn--rlj2573p; [C2 V3]; [C2 V3] # σ-.ⴣ𦟙 +B; xn----zmb.xn--7nd64871a; [V3 V6]; [V3 V6] +B; xn----zmb048s.xn--7nd64871a; [C2 V3 V6]; [C2 V3 V6] # σ-.Ⴣ𦟙 +B; xn----xmb348s.xn--rlj2573p; [C2 V3]; [C2 V3] # ς-.ⴣ𦟙 +B; xn----xmb348s.xn--7nd64871a; 
[C2 V3 V6]; [C2 V3 V6] # ς-.Ⴣ𦟙 +B; ≠。🞳𝟲; [P1 V6]; [P1 V6] +B; =\u0338。🞳𝟲; [P1 V6]; [P1 V6] +B; ≠。🞳6; [P1 V6]; [P1 V6] +B; =\u0338。🞳6; [P1 V6]; [P1 V6] +B; xn--1ch.xn--6-dl4s; [V6]; [V6] +B; 󅬽.蠔; [P1 V6]; [P1 V6] +B; xn--g747d.xn--xl2a; [V6]; [V6] +T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 +N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 +T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 +N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 +T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 +N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 +T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 +N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 +B; xn--p0b.xn--e43b; [V5]; [V5] # ࣦ.뼽 +B; xn--p0b869i.xn--e43b; [C2 V5]; [C2 V5] # ࣦ.뼽 +B; ₇\u0BCD􃂷\u06D2。👖\u0675-𞪑; [B1 P1 V6]; [B1 P1 V6] # 7்ے.👖اٴ- +B; 7\u0BCD􃂷\u06D2。👖\u0627\u0674-𞪑; [B1 P1 V6]; [B1 P1 V6] # 7்ے.👖اٴ- +B; xn--7-rwc839aj3073c.xn----ymc5uv818oghka; [B1 V6]; [B1 V6] # 7்ے.👖اٴ- +B; -。\u077B; [B1 V3]; [B1 V3] # -.ݻ +B; -。\u077B; [B1 V3]; [B1 V3] # -.ݻ +B; -.xn--cqb; [B1 V3]; [B1 V3] # -.ݻ +B; 𑇌𵛓。-⒈ꡏ\u072B; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑇌.-⒈ꡏܫ +B; 𑇌𵛓。-1.ꡏ\u072B; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 V5 V6] # 𑇌.-1.ꡏܫ +B; xn--8d1dg030h.-1.xn--1nb7163f; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 𑇌.-1.ꡏܫ +B; xn--8d1dg030h.xn----u1c466tp10j; [B1 V3 V5 V6]; [B1 V3 V5 V6] # 𑇌.-⒈ꡏܫ +B; 璛\u1734\u06AF.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 璛᜴گ.- +B; xn--ikb175frt4e.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 璛᜴گ.- +B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; xn--qyb07fj857a.xn--728bv72h; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; 񍨽.񋸕; [P1 V6]; [P1 V6] +B; 񍨽.񋸕; [P1 V6]; [P1 V6] +B; xn--pr3x.xn--rv7w; [V6]; [V6] +B; \u067D𞥕。𑑂𞤶Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤶Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤶ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; \u067D𞥕。𑑂𞤔Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤔ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; xn--2ib0338v.xn----zvs0199fo91g; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; xn--2ib0338v.xn----w0g2740ro9vg; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤶ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; \u067D𞥕。𑑂𞤔Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤔ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; 𐯀𐸉𞧏。񢚧₄Ⴋ񂹫; [P1 V6]; [P1 V6] +B; 𐯀𐸉𞧏。񢚧4Ⴋ񂹫; [P1 V6]; [P1 V6] +B; 𐯀𐸉𞧏。񢚧4ⴋ񂹫; [P1 V6]; [P1 V6] +B; xn--039c42bq865a.xn--4-wvs27840bnrzm; [V6]; [V6] +B; xn--039c42bq865a.xn--4-t0g49302fnrzm; [V6]; [V6] +B; 𐯀𐸉𞧏。񢚧₄ⴋ񂹫; [P1 V6]; [P1 V6] +B; 4\u06BD︒󠑥.≠; [B1 P1 V6]; [B1 P1 V6] # 4ڽ︒.≠ +B; 4\u06BD︒󠑥.=\u0338; [B1 P1 V6]; [B1 P1 V6] # 4ڽ︒.≠ +B; 4\u06BD。󠑥.≠; [B1 P1 V6]; [B1 P1 V6] # 4ڽ..≠ +B; 4\u06BD。󠑥.=\u0338; [B1 P1 V6]; [B1 P1 V6] # 4ڽ..≠ +B; xn--4-kvc.xn--5136e.xn--1ch; [B1 V6]; [B1 V6] # 4ڽ..≠ +B; xn--4-kvc5601q2h50i.xn--1ch; [B1 V6]; [B1 V6] # 4ڽ︒.≠ +B; 𝟓。\u06D7; [V5]; [V5] # 5.ۗ +B; 5。\u06D7; [V5]; [V5] # 5.ۗ +B; 5.xn--nlb; [V5]; [V5] # 5.ۗ +T; \u200C򺸩.⾕; [C1 P1 V6]; [P1 V6] # .谷 +N; \u200C򺸩.⾕; [C1 P1 V6]; [C1 P1 V6] # .谷 +T; \u200C򺸩.谷; [C1 P1 V6]; [P1 V6] # .谷 +N; \u200C򺸩.谷; [C1 P1 V6]; [C1 P1 V6] # .谷 +B; xn--i183d.xn--6g3a; [V6]; [V6] +B; xn--0ug26167i.xn--6g3a; [C1 V6]; [C1 V6] # .谷 +T; ︒󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6]; [P1 V3 V6] # ︒.-ܼ +N; ︒󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6]; [C1 C2 P1 V3 V6] # ︒.-ܼ +T; 。󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] # ..-ܼ +N; 。󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6 A4_2]; [C1 C2 P1 V3 V6 A4_2] 
# ..-ܼ +B; .xn--hh50e.xn----t2c; [V3 V6 A4_2]; [V3 V6 A4_2] # ..-ܼ +B; .xn--1ug05310k.xn----t2c071q; [C1 C2 V3 V6 A4_2]; [C1 C2 V3 V6 A4_2] # ..-ܼ +B; xn--y86c71305c.xn----t2c; [V3 V6]; [V3 V6] # ︒.-ܼ +B; xn--1ug1658ftw26f.xn----t2c071q; [C1 C2 V3 V6]; [C1 C2 V3 V6] # ︒.-ܼ +B; ≯𞤟。ᡨ; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338𞤟。ᡨ; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338𞥁。ᡨ; [B1 P1 V6]; [B1 P1 V6] +B; ≯𞥁。ᡨ; [B1 P1 V6]; [B1 P1 V6] +B; xn--hdhz520p.xn--48e; [B1 V6]; [B1 V6] +B; \u0F74𫫰𝨄。\u0713𐹦; [B1 V5]; [B1 V5] # ུ𫫰𝨄.ܓ𐹦 +B; xn--ned8985uo92e.xn--dnb6395k; [B1 V5]; [B1 V5] # ུ𫫰𝨄.ܓ𐹦 +B; \u033C\u07DB⁷𝟹。𝟬; [B1 V5]; [B1 V5] # ̼ߛ73.0 +B; \u033C\u07DB73。0; [B1 V5]; [B1 V5] # ̼ߛ73.0 +B; xn--73-9yb648b.0; [B1 V5]; [B1 V5] # ̼ߛ73.0 +T; \u200D.𝟗; [C2]; [A4_2] # .9 +N; \u200D.𝟗; [C2]; [C2] # .9 +T; \u200D.9; [C2]; [A4_2] # .9 +N; \u200D.9; [C2]; [C2] # .9 +B; .9; [A4_2]; [A4_2] +B; xn--1ug.9; [C2]; [C2] # .9 +B; 9; ; +B; \u0779ᡭ𪕈。\u06B6\u08D9; [B2 B3]; [B2 B3] # ݹᡭ𪕈.ڶࣙ +B; xn--9pb497fs270c.xn--pkb80i; [B2 B3]; [B2 B3] # ݹᡭ𪕈.ڶࣙ +B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ +B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ +B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ +B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ +B; xn--5-j1c97c2483c.xn--e7f2093h; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ܦ5ߢ겙.᳴ +T; Ⴍ𿣍ꡨ\u05AE。Ⴞ\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴍꡨ֮.Ⴞ +N; Ⴍ𿣍ꡨ\u05AE。Ⴞ\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍꡨ֮.Ⴞ +T; ⴍ𿣍ꡨ\u05AE。ⴞ\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴍꡨ֮.ⴞ +N; ⴍ𿣍ꡨ\u05AE。ⴞ\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍꡨ֮.ⴞ +B; xn--5cb172r175fug38a.xn--mlj; [V6]; [V6] # ⴍꡨ֮.ⴞ +B; xn--5cb172r175fug38a.xn--0uga051h; [C1 V6]; [C1 V6] # ⴍꡨ֮.ⴞ +B; xn--5cb347co96jug15a.xn--2nd; [V6]; [V6] # Ⴍꡨ֮.Ⴞ +B; xn--5cb347co96jug15a.xn--2nd059ea; [C1 V6]; [C1 V6] # Ⴍꡨ֮.Ⴞ +B; 𐋰。󑓱; [P1 V6]; [P1 V6] +B; xn--k97c.xn--q031e; [V6]; [V6] +B; 󡎦\u17B4\u0B4D.𐹾; [B1 P1 V6]; [B1 P1 V6] # ୍.𐹾 +B; xn--9ic364dho91z.xn--2o0d; [B1 V6]; [B1 V6] # ୍.𐹾 +B; \u08DFႫ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFႫ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFႫ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFႫ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFⴋ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ +B; \u08DFⴋ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ +B; xn--i0b436pkl2g2h42a.xn--0-8le8997mulr5f; [V5 V6]; [V5 V6] # ࣟⴋ귤.0휪ૣ +B; xn--i0b601b6r7l2hs0a.xn--0-8le8997mulr5f; [V5 V6]; [V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFⴋ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ +B; \u08DFⴋ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ +B; \u0784.𞡝\u0601; [P1 V6]; [P1 V6] # ބ.𞡝 +B; \u0784.𞡝\u0601; [P1 V6]; [P1 V6] # ބ.𞡝 +B; xn--lqb.xn--jfb1808v; [V6]; [V6] # ބ.𞡝 +T; \u0ACD₃.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 +N; \u0ACD₃.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 +T; \u0ACD3.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 +N; \u0ACD3.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 +B; xn--3-yke.xn--8-sl4et308f; [V5]; [V5] # ્3.8꣄🃤 +B; xn--3-yke.xn--8-ugnv982dbkwm; [V5]; [V5] # ્3.8꣄🃤 +B; ℻⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] +B; FAX⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] +B; fax⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] +B; Fax⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] +B; xn--fax-4c9a1676t.xn--6e6h; [B6]; [B6] +B; ℻⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] +B; FAX⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] +B; fax⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] +B; fax⩷𝆆.𞥂; [B6]; [B6] +B; FAX⩷𝆆.𞤠; [B6]; [B6] +B; Fax⩷𝆆.𞤠; [B6]; [B6] +B; FAX⩷𝆆.𞥂; [B6]; [B6] +B; Fax⩷𝆆.𞥂; [B6]; [B6] +B; ꡕ≠\u105E󮿱。𐵧󠄫\uFFA0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. 
+B; ꡕ=\u0338\u105E󮿱。𐵧󠄫\uFFA0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. +B; ꡕ≠\u105E󮿱。𐵧󠄫\u1160; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. +B; ꡕ=\u0338\u105E󮿱。𐵧󠄫\u1160; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. +B; xn--cld333gn31h0158l.xn--psd1510k; [B2 B3 V6]; [B2 B3 V6] # ꡕ≠ၞ. +B; xn--cld333gn31h0158l.xn--cl7c96v; [B2 B3 V6]; [B2 B3 V6] # ꡕ≠ၞ. +T; 鱊。\u200C; [C1]; xn--rt6a. # 鱊. +N; 鱊。\u200C; [C1]; [C1] # 鱊. +B; xn--rt6a.; 鱊.; xn--rt6a. +B; 鱊.; ; xn--rt6a. +B; xn--rt6a.xn--0ug; [C1]; [C1] # 鱊. +B; 8𐹣.𑍨; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; 8𐹣.𑍨; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; xn--8-d26i.xn--0p1d; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; ⏹𐧀.𐫯; [B1]; [B1] +B; ⏹𐧀.𐫯; [B1]; [B1] +B; xn--qoh9161g.xn--1x9c; [B1]; [B1] +T; 𞤺\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. +N; 𞤺\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. +T; 𞤺\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. +N; 𞤺\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. +T; 𞤘\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. +N; 𞤘\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. +B; xn--4-0bd15808a.; 𞤺\u07CC4.; xn--4-0bd15808a. # 𞤺ߌ4. +B; 𞤺\u07CC4.; ; xn--4-0bd15808a. # 𞤺ߌ4. +B; 𞤘\u07CC4.; 𞤺\u07CC4.; xn--4-0bd15808a. # 𞤺ߌ4. +B; xn--4-0bd15808a.xn--1ug; [B1 C2]; [B1 C2] # 𞤺ߌ4. +T; 𞤘\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. +N; 𞤘\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. +B; ⒗\u0981\u20EF-.\u08E2•; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒗ঁ⃯-.• +B; 16.\u0981\u20EF-.\u08E2•; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 16.ঁ⃯-.• +B; 16.xn----z0d801p.xn--l0b810j; [B1 V3 V5 V6]; [B1 V3 V5 V6] # 16.ঁ⃯-.• +B; xn----z0d801p6kd.xn--l0b810j; [B1 V3 V6]; [B1 V3 V6] # ⒗ঁ⃯-.• +B; -。䏛; [V3]; [V3] +B; -。䏛; [V3]; [V3] +B; -.xn--xco; [V3]; [V3] +T; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [P1 V6] # . +N; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [C1 C2 P1 V6] # . +T; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [P1 V6] # . +N; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [C1 C2 P1 V6] # . +B; xn--dj8y.; [V6]; [V6] +B; xn--0ugz7551c.xn--1ug; [C1 C2 V6]; [C1 C2 V6] # . 
+T; ⒈⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # ⒈⓰.𐹠Ⴕ +N; ⒈⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⒈⓰.𐹠Ⴕ +T; 1.⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # 1.⓰.𐹠Ⴕ +N; 1.⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 1.⓰.𐹠Ⴕ +T; 1.⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # 1.⓰.𐹠ⴕ +N; 1.⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 1.⓰.𐹠ⴕ +B; 1.xn--svh00804k.xn--dljv223ee5t2d; [B1 V6]; [B1 V6] +B; 1.xn--svh00804k.xn--1ug352csp0psg45e; [B1 C2 V6]; [B1 C2 V6] # 1.⓰.𐹠ⴕ +B; 1.xn--svh00804k.xn--tnd1990ke579c; [B1 V6]; [B1 V6] +B; 1.xn--svh00804k.xn--tnd969erj4psgl3e; [B1 C2 V6]; [B1 C2 V6] # 1.⓰.𐹠Ⴕ +T; ⒈⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # ⒈⓰.𐹠ⴕ +N; ⒈⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⒈⓰.𐹠ⴕ +B; xn--tsh0nz9380h.xn--dljv223ee5t2d; [B1 V6]; [B1 V6] +B; xn--tsh0nz9380h.xn--1ug352csp0psg45e; [B1 C2 V6]; [B1 C2 V6] # ⒈⓰.𐹠ⴕ +B; xn--tsh0nz9380h.xn--tnd1990ke579c; [B1 V6]; [B1 V6] +B; xn--tsh0nz9380h.xn--tnd969erj4psgl3e; [B1 C2 V6]; [B1 C2 V6] # ⒈⓰.𐹠Ⴕ +T; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 +N; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 +T; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 +N; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 +B; 𞠊ᠮ-SS。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𞠊ᠮ-ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𞠊ᠮ-Ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; xn---ss-21t18904a.xn--jfb197i791bi6x4c; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; xn----qfa310pg973b.xn--jfb197i791bi6x4c; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # 𞠊ᠮ-ß.᳐効 +B; 𞠊ᠮ-SS。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𞠊ᠮ-ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𞠊ᠮ-Ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𑇀.󠨱; [P1 V5 V6]; [P1 V5 V6] +B; xn--wd1d.xn--k946e; [V5 V6]; [V5 V6] +B; ␒3\uFB88。𝟘𐨿𐹆; [B1 P1 V6]; [B1 P1 V6] # ␒3ڈ.0𐨿 +B; ␒3\u0688。0𐨿𐹆; [B1 P1 V6]; [B1 P1 V6] # ␒3ڈ.0𐨿 +B; xn--3-jsc897t.xn--0-sc5iy3h; [B1 V6]; [B1 V6] # ␒3ڈ.0𐨿 +B; \u076B6\u0A81\u08A6。\u1DE3; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ +B; \u076B6\u0A81\u08A6。\u1DE3; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ +B; xn--6-h5c06gj6c.xn--7eg; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ +T; \u0605-𽤞Ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # -Ⴂ. +N; \u0605-𽤞Ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # -Ⴂ. +T; \u0605-𽤞ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # -ⴂ. +N; \u0605-𽤞ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # -ⴂ. +B; xn----0kc8501a5399e.xn--ss06b; [B1 V6]; [B1 V6] # -ⴂ. +B; xn----0kc8501a5399e.xn--1ugy3204f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # -ⴂ. +B; xn----0kc662fc152h.xn--ss06b; [B1 V6]; [B1 V6] # -Ⴂ. +B; xn----0kc662fc152h.xn--1ugy3204f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # -Ⴂ. 
+T; ⾆.ꡈ5≯ß; [P1 V6]; [P1 V6] +N; ⾆.ꡈ5≯ß; [P1 V6]; [P1 V6] +T; ⾆.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] +N; ⾆.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] +T; 舌.ꡈ5≯ß; [P1 V6]; [P1 V6] +N; 舌.ꡈ5≯ß; [P1 V6]; [P1 V6] +T; 舌.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] +N; 舌.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] +B; 舌.ꡈ5>\u0338SS; [P1 V6]; [P1 V6] +B; 舌.ꡈ5≯SS; [P1 V6]; [P1 V6] +B; 舌.ꡈ5≯ss; [P1 V6]; [P1 V6] +B; 舌.ꡈ5>\u0338ss; [P1 V6]; [P1 V6] +B; 舌.ꡈ5>\u0338Ss; [P1 V6]; [P1 V6] +B; 舌.ꡈ5≯Ss; [P1 V6]; [P1 V6] +B; xn--tc1a.xn--5ss-3m2a5009e; [V6]; [V6] +B; xn--tc1a.xn--5-qfa988w745i; [V6]; [V6] +B; ⾆.ꡈ5>\u0338SS; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5≯SS; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5≯ss; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5>\u0338ss; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5>\u0338Ss; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5≯Ss; [P1 V6]; [P1 V6] +T; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ્8.ݜ +N; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ્8.ݜ +T; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ્8.ݜ +N; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ્8.ݜ +B; xn--8-yke.xn--gpb79046m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ્8.ݜ +B; xn--8-yke534n.xn--gpb79046m; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # ્8.ݜ +B; 򸷆\u0A70≮򹓙.񞎧⁷󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ +B; 򸷆\u0A70<\u0338򹓙.񞎧⁷󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ +B; 򸷆\u0A70≮򹓙.񞎧7󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ +B; 򸷆\u0A70<\u0338򹓙.񞎧7󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ +B; xn--ycc893jqh38rb6fa.xn--7-5uc53836ixt41c; [B5 B6 V6]; [B5 B6 V6] # ੰ≮.7ڶ +T; 𞤪.ς; ; xn--ie6h.xn--4xa +N; 𞤪.ς; ; xn--ie6h.xn--3xa +B; 𞤈.Σ; 𞤪.σ; xn--ie6h.xn--4xa +B; 𞤪.σ; ; xn--ie6h.xn--4xa +B; 𞤈.σ; 𞤪.σ; xn--ie6h.xn--4xa +B; xn--ie6h.xn--4xa; 𞤪.σ; xn--ie6h.xn--4xa +T; 𞤈.ς; 𞤪.ς; xn--ie6h.xn--4xa +N; 𞤈.ς; 𞤪.ς; xn--ie6h.xn--3xa +B; xn--ie6h.xn--3xa; 𞤪.ς; xn--ie6h.xn--3xa +B; 𞤪.Σ; 𞤪.σ; xn--ie6h.xn--4xa +T; \u200CႺ。ς; [C1 P1 V6]; [P1 V6] # Ⴚ.ς +N; \u200CႺ。ς; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.ς +T; \u200CႺ。ς; [C1 P1 V6]; [P1 V6] # Ⴚ.ς +N; \u200CႺ。ς; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.ς +T; \u200Cⴚ。ς; [C1]; xn--ilj.xn--4xa # ⴚ.ς +N; \u200Cⴚ。ς; [C1]; [C1] # ⴚ.ς +T; \u200CႺ。Σ; [C1 P1 V6]; [P1 V6] # Ⴚ.σ +N; \u200CႺ。Σ; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.σ +T; \u200Cⴚ。σ; [C1]; xn--ilj.xn--4xa # ⴚ.σ +N; \u200Cⴚ。σ; [C1]; [C1] # ⴚ.σ +B; xn--ilj.xn--4xa; ⴚ.σ; xn--ilj.xn--4xa +B; ⴚ.σ; ; xn--ilj.xn--4xa +B; Ⴚ.Σ; [P1 V6]; [P1 V6] +T; ⴚ.ς; ; xn--ilj.xn--4xa +N; ⴚ.ς; ; xn--ilj.xn--3xa +T; Ⴚ.ς; [P1 V6]; [P1 V6] +N; Ⴚ.ς; [P1 V6]; [P1 V6] +B; xn--ynd.xn--4xa; [V6]; [V6] +B; xn--ynd.xn--3xa; [V6]; [V6] +B; xn--ilj.xn--3xa; ⴚ.ς; xn--ilj.xn--3xa +B; Ⴚ.σ; [P1 V6]; [P1 V6] +B; xn--0ug262c.xn--4xa; [C1]; [C1] # ⴚ.σ +B; xn--ynd759e.xn--4xa; [C1 V6]; [C1 V6] # Ⴚ.σ +B; xn--0ug262c.xn--3xa; [C1]; [C1] # ⴚ.ς +B; xn--ynd759e.xn--3xa; [C1 V6]; [C1 V6] # Ⴚ.ς +T; \u200Cⴚ。ς; [C1]; xn--ilj.xn--4xa # ⴚ.ς +N; \u200Cⴚ。ς; [C1]; [C1] # ⴚ.ς +T; \u200CႺ。Σ; [C1 P1 V6]; [P1 V6] # Ⴚ.σ +N; \u200CႺ。Σ; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.σ +T; \u200Cⴚ。σ; [C1]; xn--ilj.xn--4xa # ⴚ.σ +N; \u200Cⴚ。σ; [C1]; [C1] # ⴚ.σ +B; 𞤃.𐹦; [B1]; [B1] +B; 𞤃.𐹦; [B1]; [B1] +B; 𞤥.𐹦; [B1]; [B1] +B; xn--de6h.xn--eo0d; [B1]; [B1] +B; 𞤥.𐹦; [B1]; [B1] +T; \u200D⾕。\u200C\u0310\uA953ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ +N; \u200D⾕。\u200C\u0310\uA953ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ +T; \u200D⾕。\u200C\uA953\u0310ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ +N; \u200D⾕。\u200C\uA953\u0310ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ +T; \u200D谷。\u200C\uA953\u0310ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ +N; \u200D谷。\u200C\uA953\u0310ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ +B; xn--6g3a.xn--0sa8175flwa; [V5]; [V5] # 谷.꥓̐ꡎ +B; 
xn--1ug0273b.xn--0sa359l6n7g13a; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ +T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +B; xn----guc3592k.xn--qe6h; [B2 B3]; [B2 B3] # ڪ-뉔.𞤲 +B; xn----guc3592k.xn--0ug7611p; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; 񔲵5ᦛς.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +N; 񔲵5ᦛς.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +T; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +N; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +T; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +N; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +B; 񔲵5ᦛΣ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 񔲵5ᦛσ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; xn--5-0mb988ng603j.xn--fob7kk44dl41k; [B1 V5 V6]; [B1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; xn--5-ymb298ng603j.xn--fob7kk44dl41k; [B1 V5 V6]; [B1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +B; 񔲵5ᦛΣ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 񔲵5ᦛσ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 񔲵5ᦛΣ.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 񔲵5ᦛσ.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 淽。ᠾ; 淽.ᠾ; xn--34w.xn--x7e +B; xn--34w.xn--x7e; 淽.ᠾ; xn--34w.xn--x7e +B; 淽.ᠾ; ; xn--34w.xn--x7e +B; 𐹴𑘷。-; [B1 V3]; [B1 V3] +B; xn--so0do6k.-; [B1 V3]; [B1 V3] +B; 򬨩Ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] +B; 򬨩Ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] +B; 򬨩ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] +B; xn--8di78qvw32y.xn--k80d; [V5 V6]; [V5 V6] +B; xn--rnd896i0j14q.xn--k80d; [V5 V6]; [V5 V6] +B; 򬨩ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] +T; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ +N; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ +T; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ +N; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ +T; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ +N; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +T; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ +N; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ +T; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +B; xn--ykj9323eegwf.xn--ss-151a; [B1]; [B1] +B; xn--0ug332c3q0pr56g.xn--ss-151a; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +B; xn--fnd1201kegrf.xn--ss-fek; [B1 V6]; [B1 V6] +B; xn--fnd599eyj4pr50g.xn--ss-fek; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ssႣ +B; xn--0ug332c3q0pr56g.xn--zca417t; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +B; xn--fnd599eyj4pr50g.xn--zca681f; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ßႣ +T; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ +N; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +T; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ +N; 
\u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ +T; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ +N; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +T; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ +N; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ +B; xn--fnd1201kegrf.xn--ss-151a; [B1 V6]; [B1 V6] +B; xn--fnd599eyj4pr50g.xn--ss-151a; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ssⴃ +T; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ +N; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +T; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ +N; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ +B; \u17FF。𞬳; [P1 V6]; [P1 V6] # . +B; \u17FF。𞬳; [P1 V6]; [P1 V6] # . +B; xn--45e.xn--et6h; [V6]; [V6] # . +T; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [V5] # ْ.್𑚳 +N; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [C2 V5] # ْ.್𑚳 +T; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [V5] # ْ.್𑚳 +N; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [C2 V5] # ْ.್𑚳 +B; xn--uhb.xn--8tc4527k; [V5]; [V5] # ْ.್𑚳 +B; xn--uhb882k.xn--8tc4527k; [C2 V5]; [C2 V5] # ْ.್𑚳 +B; -≠ᠻ.\u076D𞥃≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -=\u0338ᠻ.\u076D𞥃<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -≠ᠻ.\u076D𞥃≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -=\u0338ᠻ.\u076D𞥃<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -=\u0338ᠻ.\u076D𞤡<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -≠ᠻ.\u076D𞤡≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; xn----g6j886c.xn--xpb049kk353abj99f; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -=\u0338ᠻ.\u076D𞤡<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -≠ᠻ.\u076D𞤡≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; 󠰆≯\u07B5𐻪.򊥕≮𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ +B; 󠰆>\u0338\u07B5𐻪.򊥕<\u0338𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ +B; 󠰆≯\u07B5𐻪.򊥕≮𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ +B; 󠰆>\u0338\u07B5𐻪.򊥕<\u0338𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ +B; xn--zrb797kdm1oes34i.xn--bwb394k8k2o25n6d; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ≯.≮𑁆ࡌ +B; ≠󦋂.\u0600\u0BCD-\u06B9; [B1 P1 V6]; [B1 P1 V6] # ≠.்-ڹ +B; =\u0338󦋂.\u0600\u0BCD-\u06B9; [B1 P1 V6]; [B1 P1 V6] # ≠.்-ڹ +B; xn--1ch22084l.xn----qkc07co6n; [B1 V6]; [B1 V6] # ≠.்-ڹ +B; \u17DD󠁣≠。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 +B; \u17DD󠁣=\u0338。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 +B; \u17DD󠁣≠。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 +B; \u17DD󠁣=\u0338。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 +B; xn--54e694cn389z.xn--787ct8r; [B1 V5 V6]; [B1 V5 V6] # ៝≠.𐹼𐋤 +T; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] +N; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] +T; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] +N; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] +B; SS𰀻񆬗。𝩨🕮SS; [P1 V5 V6]; [P1 V5 V6] +B; ss𰀻񆬗。𝩨🕮ss; [P1 V5 V6]; [P1 V5 V6] +B; Ss𰀻񆬗。𝩨🕮Ss; [P1 V5 V6]; [P1 V5 V6] +B; xn--ss-jl59biy67d.xn--ss-4d11aw87d; [V5 V6]; [V5 V6] +B; xn--zca20040bgrkh.xn--zca3653v86qa; [V5 V6]; [V5 V6] +B; SS𰀻񆬗。𝩨🕮SS; [P1 V5 V6]; [P1 V5 V6] +B; ss𰀻񆬗。𝩨🕮ss; [P1 V5 V6]; [P1 V5 V6] +B; Ss𰀻񆬗。𝩨🕮Ss; [P1 V5 V6]; [P1 V5 V6] +T; \u200D。\u200C; [C1 C2]; [A4_2] # . +N; \u200D。\u200C; [C1 C2]; [C1 C2] # . +B; xn--1ug.xn--0ug; [C1 C2]; [C1 C2] # . 
+T; \u0483𐭞\u200D.\u17B9𞯌򟩚; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ҃𐭞.ឹ +N; \u0483𐭞\u200D.\u17B9𞯌򟩚; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ҃𐭞.ឹ +B; xn--m3a6965k.xn--43e8670vmd79b; [B1 V5 V6]; [B1 V5 V6] # ҃𐭞.ឹ +B; xn--m3a412lrr0o.xn--43e8670vmd79b; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ҃𐭞.ឹ +T; \u200C𐠨\u200C临。ꡢ򄷞ⶏ𐹣; [B1 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # 𐠨临.ꡢⶏ𐹣 +N; \u200C𐠨\u200C临。ꡢ򄷞ⶏ𐹣; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐠨临.ꡢⶏ𐹣 +B; xn--miq9646b.xn--uojv340bk71c99u9f; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] +B; xn--0uga2656aop9k.xn--uojv340bk71c99u9f; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐠨临.ꡢⶏ𐹣 +B; 󠑘.󠄮; [P1 V6]; [P1 V6] +B; 󠑘.󠄮; [P1 V6]; [P1 V6] +B; xn--s136e.; [V6]; [V6] +B; 𐫄\u0D4D.\uAAF6; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ +B; 𐫄\u0D4D.\uAAF6; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ +B; xn--wxc7880k.xn--2v9a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ +B; \uA9B7󝵙멹。⒛󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.⒛ +B; \uA9B7󝵙멹。⒛󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.⒛ +B; \uA9B7󝵙멹。20.󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.20. +B; \uA9B7󝵙멹。20.󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.20. +B; xn--ym9av13acp85w.20.xn--d846e; [V5 V6]; [V5 V6] # ꦷ멹.20. +B; xn--ym9av13acp85w.xn--dth22121k; [V5 V6]; [V5 V6] # ꦷ멹.⒛ +B; Ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳⒊ +B; Ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳⒊ +B; Ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳3. +B; Ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳3. +B; ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳3. +B; ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳3. +B; xn--wkj8016bne45io02g.xn--3-55c6803r.; [B4 B6 V6]; [B4 B6 V6] # ⴅ릖.ݷ𐹳3. +B; xn--dnd2167fnet0io02g.xn--3-55c6803r.; [B4 B6 V6]; [B4 B6 V6] # Ⴅ릖.ݷ𐹳3. +B; ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳⒊ +B; ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳⒊ +B; xn--wkj8016bne45io02g.xn--7pb000mwm4n; [B4 B6 V6]; [B4 B6 V6] # ⴅ릖.ݷ𐹳⒊ +B; xn--dnd2167fnet0io02g.xn--7pb000mwm4n; [B4 B6 V6]; [B4 B6 V6] # Ⴅ릖.ݷ𐹳⒊ +T; \u200C。︒; [C1 P1 V6]; [P1 V6 A4_2] # .︒ +N; \u200C。︒; [C1 P1 V6]; [C1 P1 V6] # .︒ +T; \u200C。。; [C1 A4_2]; [A4_2] # .. +N; \u200C。。; [C1 A4_2]; [C1 A4_2] # .. +B; ..; [A4_2]; [A4_2] +B; xn--0ug..; [C1 A4_2]; [C1 A4_2] # .. 
+B; .xn--y86c; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--y86c; [C1 V6]; [C1 V6] # .︒ +B; ≯\u076D.₄; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 +B; >\u0338\u076D.₄; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 +B; ≯\u076D.4; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 +B; >\u0338\u076D.4; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 +B; xn--xpb149k.4; [B1 V6]; [B1 V6] # ≯ݭ.4 +T; ᡲ-𝟹.ß-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ß-- +N; ᡲ-𝟹.ß-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ß-- +T; ᡲ-3.ß-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ß-- +N; ᡲ-3.ß-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ß-- +T; ᡲ-3.SS-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-3.SS-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +T; ᡲ-3.ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-3.ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +T; ᡲ-3.Ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-3.Ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +B; xn---3-p9o.ss--; [V2 V3]; [V2 V3] +B; xn---3-p9o.xn--ss---276a; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +B; xn---3-p9o.xn-----fia9303a; [C1 V3]; [C1 V3] # ᡲ-3.ß-- +T; ᡲ-𝟹.SS-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-𝟹.SS-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +T; ᡲ-𝟹.ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-𝟹.ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +T; ᡲ-𝟹.Ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-𝟹.Ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +B; \uFD08𝟦\u0647󎊯。Ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.Ӏ +B; \u0636\u064A4\u0647󎊯。Ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.Ӏ +B; \u0636\u064A4\u0647󎊯。ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.ӏ +B; xn--4-tnc6ck183523b.xn--s5a; [B2 B3 V6]; [B2 B3 V6] # ضي4ه.ӏ +B; xn--4-tnc6ck183523b.xn--d5a; [B2 B3 V6]; [B2 B3 V6] # ضي4ه.Ӏ +B; \uFD08𝟦\u0647󎊯。ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.ӏ +B; -.\u0602\u0622𑆾🐹; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.آ𑆾🐹 +B; -.\u0602\u0627\u0653𑆾🐹; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.آ𑆾🐹 +B; -.xn--kfb8dy983hgl7g; [B1 V3 V6]; [B1 V3 V6] # -.آ𑆾🐹 +B; 󙶜ᢘ。\u1A7F⺢; [P1 V5 V6]; [P1 V5 V6] # ᢘ.᩿⺢ +B; xn--ibf35138o.xn--fpfz94g; [V5 V6]; [V5 V6] # ᢘ.᩿⺢ +B; ≠ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; =\u0338ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; ≠ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; =\u0338ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; =\u0338ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; ≠ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; ≠Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; =\u0338Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; xn--vnd619as6ig6k.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; XN--VND619AS6IG6K.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; Xn--Vnd619as6ig6k.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; xn--66e353ce0ilb.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; XN--66E353CE0ILB.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; Xn--66E353ce0ilb.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; xn--jndx718cnnl.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; XN--JNDX718CNNL.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; Xn--Jndx718cnnl.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; =\u0338ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; ≠ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; ≠Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; =\u0338Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 
V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; \u0667.𐥨; [B1 P1 V6]; [B1 P1 V6] # ٧. +B; xn--gib.xn--vm9c; [B1 V6]; [B1 V6] # ٧. +T; \uA9C0𝟯。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B5 P1 V5 V6] # ꧀3.𐹪᯳ +N; \uA9C0𝟯。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ꧀3.𐹪᯳ +T; \uA9C03。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B5 P1 V5 V6] # ꧀3.𐹪᯳ +N; \uA9C03。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ꧀3.𐹪᯳ +B; xn--3-5z4e.xn--1zfz754hncv8b; [B5 V5 V6]; [B5 V5 V6] # ꧀3.𐹪᯳ +B; xn--3-5z4e.xn--1zf96ony8ygd68c; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ꧀3.𐹪᯳ +B; 򣕄4񠖽.≯\u0664𑀾󠸌; [B1 P1 V6]; [B1 P1 V6] # 4.≯٤𑀾 +B; 򣕄4񠖽.>\u0338\u0664𑀾󠸌; [B1 P1 V6]; [B1 P1 V6] # 4.≯٤𑀾 +B; xn--4-fg85dl688i.xn--dib174li86ntdy0i; [B1 V6]; [B1 V6] # 4.≯٤𑀾 +B; 򗆧𝟯。⒈\u1A76𝟚򠘌; [P1 V6]; [P1 V6] # 3.⒈᩶2 +B; 򗆧3。1.\u1A762򠘌; [P1 V5 V6]; [P1 V5 V6] # 3.1.᩶2 +B; xn--3-rj42h.1.xn--2-13k96240l; [V5 V6]; [V5 V6] # 3.1.᩶2 +B; xn--3-rj42h.xn--2-13k746cq465x; [V6]; [V6] # 3.⒈᩶2 +T; \u200D₅⒈。≯𝟴\u200D; [C2 P1 V6]; [P1 V6] # 5⒈.≯8 +N; \u200D₅⒈。≯𝟴\u200D; [C2 P1 V6]; [C2 P1 V6] # 5⒈.≯8 +T; \u200D₅⒈。>\u0338𝟴\u200D; [C2 P1 V6]; [P1 V6] # 5⒈.≯8 +N; \u200D₅⒈。>\u0338𝟴\u200D; [C2 P1 V6]; [C2 P1 V6] # 5⒈.≯8 +T; \u200D51.。≯8\u200D; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 51..≯8 +N; \u200D51.。≯8\u200D; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 51..≯8 +T; \u200D51.。>\u03388\u200D; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 51..≯8 +N; \u200D51.。>\u03388\u200D; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 51..≯8 +B; 51..xn--8-ogo; [V6 A4_2]; [V6 A4_2] +B; xn--51-l1t..xn--8-ugn00i; [C2 V6 A4_2]; [C2 V6 A4_2] # 51..≯8 +B; xn--5-ecp.xn--8-ogo; [V6]; [V6] +B; xn--5-tgnz5r.xn--8-ugn00i; [C2 V6]; [C2 V6] # 5⒈.≯8 +T; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ +N; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ +T; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ +N; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ +T; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ +N; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ +T; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ +N; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ +B; xn--tjb002cn51k.xn--5nb630lbj91q; [B5 B6 V6]; [B5 B6 V6] # ꡰڗႆ.ܯ≠ +B; xn--tjb002cn51k.xn--5nb448jcubcz547b; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ꡰڗႆ.ܯ≠ +B; 𑄱。򪌿𐹵; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] +B; 𑄱。򪌿𐹵; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] +B; xn--t80d.xn--to0d14792b; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] +B; 𝟥\u0600。\u073D; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 3.ܽ +B; 3\u0600。\u073D; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 3.ܽ +B; xn--3-rkc.xn--kob; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 3.ܽ +B; \u0637𐹣\u0666.\u076D긷; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 +B; \u0637𐹣\u0666.\u076D긷; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 +B; xn--2gb8gu829f.xn--xpb0156f; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 +B; ︒Ↄ\u2DE7򾀃.Ⴗ𐣞; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ︒Ↄⷧ.Ⴗ +B; 。Ↄ\u2DE7򾀃.Ⴗ𐣞; [B5 B6 P1 V6 A4_2]; [B5 B6 P1 V6 A4_2] # .Ↄⷧ.Ⴗ +B; 。ↄ\u2DE7򾀃.ⴗ𐣞; [B5 B6 P1 V6 A4_2]; [B5 B6 P1 V6 A4_2] # .ↄⷧ.ⴗ +B; .xn--r5gy00cll06u.xn--flj4541e; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .ↄⷧ.ⴗ +B; .xn--q5g000cll06u.xn--vnd8618j; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .Ↄⷧ.Ⴗ +B; ︒ↄ\u2DE7򾀃.ⴗ𐣞; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ︒ↄⷧ.ⴗ +B; xn--r5gy00c056n0226g.xn--flj4541e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ︒ↄⷧ.ⴗ +B; xn--q5g000c056n0226g.xn--vnd8618j; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ︒Ↄⷧ.Ⴗ +B; \u0600.\u05B1; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # .ֱ +B; 
xn--ifb.xn--8cb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # .ֱ +T; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +N; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +T; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +N; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +T; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +N; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +T; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +N; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; Σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; Σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; xn--4xa818m.xn--1o0d; [B1 B6 V6]; [B1 B6 V6] +B; xn--3xa028m.xn--1o0d; [B1 B6 V6]; [B1 B6 V6] +B; Σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; Σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +T; \u17D2\u200D\u075F。𐹶; [B1 V5]; [B1 V5] # ្ݟ.𐹶 +N; \u17D2\u200D\u075F。𐹶; [B1 V5]; [B1 V5] # ្ݟ.𐹶 +B; xn--jpb535f.xn--uo0d; [B1 V5]; [B1 V5] # ្ݟ.𐹶 +B; xn--jpb535fv9f.xn--uo0d; [B1 V5]; [B1 V5] # ្ݟ.𐹶 +B; 𾷂\u0A42Ⴊ񂂟.≮; [P1 V6]; [P1 V6] # ੂႪ.≮ +B; 𾷂\u0A42Ⴊ񂂟.<\u0338; [P1 V6]; [P1 V6] # ੂႪ.≮ +B; 𾷂\u0A42ⴊ񂂟.<\u0338; [P1 V6]; [P1 V6] # ੂⴊ.≮ +B; 𾷂\u0A42ⴊ񂂟.≮; [P1 V6]; [P1 V6] # ੂⴊ.≮ +B; xn--nbc229o4y27dgskb.xn--gdh; [V6]; [V6] # ੂⴊ.≮ +B; xn--nbc493aro75ggskb.xn--gdh; [V6]; [V6] # ੂႪ.≮ +B; ꡠ.۲; ꡠ.۲; xn--5c9a.xn--fmb +B; ꡠ.۲; ; xn--5c9a.xn--fmb +B; xn--5c9a.xn--fmb; ꡠ.۲; xn--5c9a.xn--fmb +B; 𐹣񄷄。ꡬ🄄; [B1 P1 V6]; [B1 P1 V6] +B; 𐹣񄷄。ꡬ3,; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; xn--bo0d0203l.xn--3,-yj9h; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; xn--bo0d0203l.xn--id9a4443d; [B1 V6]; [B1 V6] +T; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 B3 B6 P1 V3 V5 V6] # -్𑲓.് +N; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # -్𑲓.് +T; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 B3 B6 P1 V3 V5 V6] # -్𑲓.് +N; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # -్𑲓.് +B; xn----x6e0220sclug.xn--wxc; [B1 B3 B6 V3 V5 V6]; [B1 B3 B6 V3 V5 V6] # -్𑲓.് +B; xn----x6e0220sclug.xn--wxc317g; [B1 C2 V3 V6]; [B1 C2 V3 V6] # -్𑲓.് +T; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣🄆.𑁂ᬁ +N; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣🄆.𑁂ᬁ +T; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣🄆.𑁂ᬁ +N; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣🄆.𑁂ᬁ +T; \uA67D\u200C霣5,。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣5,.𑁂ᬁ +N; \uA67D\u200C霣5,。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣5,.𑁂ᬁ +B; xn--5,-op8g373c.xn--4sf0725i; [P1 V5 V6]; [P1 V5 V6] # ꙽霣5,.𑁂ᬁ +B; xn--5,-i1tz135dnbqa.xn--4sf36u6u4w; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣5,.𑁂ᬁ +B; xn--2q5a751a653w.xn--4sf0725i; [V5 V6]; [V5 V6] # ꙽霣🄆.𑁂ᬁ +B; xn--0ug4208b2vjuk63a.xn--4sf36u6u4w; [C1 V5 V6]; [C1 V5 V6] # ꙽霣🄆.𑁂ᬁ +B; 兎。ᠼ󠴜𑚶𑰿; [P1 V6]; [P1 V6] +B; 兎。ᠼ󠴜𑚶𑰿; [P1 V6]; [P1 V6] +B; xn--b5q.xn--v7e6041kqqd4m251b; [V6]; [V6] +T; 𝟙。\u200D𝟸\u200D⁷; [C2]; 1.27 # 1.27 +N; 𝟙。\u200D𝟸\u200D⁷; [C2]; [C2] # 1.27 +T; 1。\u200D2\u200D7; [C2]; 1.27 # 1.27 +N; 1。\u200D2\u200D7; [C2]; [C2] # 1.27 +B; 1.27; ; +B; 1.xn--27-l1tb; [C2]; [C2] # 1.27 +B; ᡨ-。󠻋𝟷; [P1 V3 V6]; [P1 V3 V6] +B; ᡨ-。󠻋1; [P1 V3 V6]; [P1 V3 V6] +B; xn----z8j.xn--1-5671m; [V3 V6]; [V3 V6] +B; 𑰻񵀐𐫚.\u0668⁹; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑰻𐫚.٨9 +B; 𑰻񵀐𐫚.\u06689; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑰻𐫚.٨9 +B; xn--gx9cr01aul57i.xn--9-oqc; [B1 V5 V6]; [B1 V5 V6] # 𑰻𐫚.٨9 +T; Ⴜ򈷭\u0F80⾇。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴜྀ舛.Ⴏ♀ +N; Ⴜ򈷭\u0F80⾇。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴜྀ舛.Ⴏ♀ +T; Ⴜ򈷭\u0F80舛。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴜྀ舛.Ⴏ♀ +N; 
Ⴜ򈷭\u0F80舛。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴜྀ舛.Ⴏ♀ +T; ⴜ򈷭\u0F80舛。ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴜྀ舛.ⴏ♀ +N; ⴜ򈷭\u0F80舛。ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴜྀ舛.ⴏ♀ +B; xn--zed372mdj2do3v4h.xn--e5h11w; [V6]; [V6] # ⴜྀ舛.ⴏ♀ +B; xn--zed372mdj2do3v4h.xn--0uga678bgyh; [C1 V6]; [C1 V6] # ⴜྀ舛.ⴏ♀ +B; xn--zed54dz10wo343g.xn--nnd651i; [V6]; [V6] # Ⴜྀ舛.Ⴏ♀ +B; xn--zed54dz10wo343g.xn--nnd089ea464d; [C1 V6]; [C1 V6] # Ⴜྀ舛.Ⴏ♀ +T; ⴜ򈷭\u0F80⾇。ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴜྀ舛.ⴏ♀ +N; ⴜ򈷭\u0F80⾇。ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴜྀ舛.ⴏ♀ +T; 𑁆𝟰.\u200D; [C2 V5]; [V5] # 𑁆4. +N; 𑁆𝟰.\u200D; [C2 V5]; [C2 V5] # 𑁆4. +T; 𑁆4.\u200D; [C2 V5]; [V5] # 𑁆4. +N; 𑁆4.\u200D; [C2 V5]; [C2 V5] # 𑁆4. +B; xn--4-xu7i.; [V5]; [V5] +B; xn--4-xu7i.xn--1ug; [C2 V5]; [C2 V5] # 𑁆4. +T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 +N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 +N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 +N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 +N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 +N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 +T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 +N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 +B; xn--mlju35u7qx2f.xn--et3bn23n; [V5 V6]; [V5 V6] +B; xn--mlju35u7qx2f.xn--0ugb6122js83c; [C1 V5 V6]; [C1 V5 V6] # ⴞ癀.𑘿붼 +B; xn--2nd6803c7q37d.xn--et3bn23n; [V5 V6]; [V5 V6] +B; xn--2nd6803c7q37d.xn--0ugb6122js83c; [C1 V5 V6]; [C1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 +N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 +T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 +N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 +B; 󚀅-\u0BCD。\u06B9; [B6 P1 V6]; [B6 P1 V6] # -்.ڹ +B; xn----mze84808x.xn--skb; [B6 V6]; [B6 V6] # -்.ڹ +B; ᡃ𝟧≯ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] +B; ᡃ𝟧>\u0338ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] +B; ᡃ5≯ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] +B; ᡃ5>\u0338ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] +B; xn--5-24jyf768b.xn--lqw213ime95g; [V6]; [V6] +B; 𐹬𝩇.\u0F76; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ +B; 𐹬𝩇.\u0FB2\u0F80; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ +B; 𐹬𝩇.\u0FB2\u0F80; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ +B; xn--ko0d8295a.xn--zed3h; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ +B; -𑈶⒏.⒎𰛢󠎭; [P1 V3 V6]; [P1 V3 V6] +B; -𑈶8..7.𰛢󠎭; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] +B; xn---8-bv5o..7.xn--c35nf1622b; [V3 V6 A4_2]; [V3 V6 A4_2] +B; xn----scp6252h.xn--zshy411yzpx2d; [V3 V6]; [V3 V6] +T; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ +N; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ +T; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ +N; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ +T; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ +N; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ +T; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ +N; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ +T; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ +N; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ +T; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ +N; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ +B; xn--skjy82u.xn--gdh; [V6]; [V6] +B; xn--0ugc160hb36e.xn--gdh; [C1 C2 V6]; [C1 C2 V6] # ⴁ畝.≮ +B; xn--8md0962c.xn--gdh; [V6]; [V6] +B; 
xn--8md700fea3748f.xn--gdh; [C1 C2 V6]; [C1 C2 V6] # Ⴁ畝.≮ +T; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ +N; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ +T; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ +N; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ +T; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ +N; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ +T; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ +N; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ +T; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ +N; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ +T; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ +N; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ +B; xn--nmw.xn--hdh7804gdms2h; [B1 V6]; [B1 V6] +B; xn--nmw.xn--1ugx6gs128a1134j; [B1 C2 V6]; [B1 C2 V6] # 歷.𐹻≯ +T; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [P1 V5 V6] # ໋.鎁 +N; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ໋.鎁 +T; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [P1 V5 V6] # ໋.鎁 +N; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ໋.鎁 +B; xn--t8c.xn--iz4a43209d; [V5 V6]; [V5 V6] # ໋.鎁 +B; xn--t8c059f.xn--iz4a43209d; [C2 V5 V6]; [C2 V5 V6] # ໋.鎁 +T; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. +N; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. +T; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. +N; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. +T; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. +N; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. +B; xn--9d6h.xn--wh0dj799f; [B5 B6 V6]; [B5 B6 V6] +B; xn--0ugb45126a.xn--wh0dj799f; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # 𞤢. +T; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. +N; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. 
+T; \u0628≠𝟫-.ς⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ +N; \u0628≠𝟫-.ς⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ +T; \u0628=\u0338𝟫-.ς⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ +N; \u0628=\u0338𝟫-.ς⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ +T; \u0628≠9-.ς6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ +N; \u0628≠9-.ς6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ +T; \u0628=\u03389-.ς6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ +N; \u0628=\u03389-.ς6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ +B; \u0628=\u03389-.Σ6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ +B; \u0628≠9-.Σ6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ +B; \u0628≠9-.σ6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ +B; \u0628=\u03389-.σ6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ +B; xn--9--etd0100a.xn--6-zmb.xn--1ch8704g; [B1 B3 V3 V6]; [B1 B3 V3 V6] # ب≠9-.σ6.𐹦≠ +B; xn--9--etd0100a.xn--6-xmb.xn--1ch8704g; [B1 B3 V3 V6]; [B1 B3 V3 V6] # ب≠9-.ς6.𐹦≠ +B; \u0628=\u0338𝟫-.Σ⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; \u0628≠𝟫-.Σ⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; \u0628≠𝟫-.σ⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; \u0628=\u0338𝟫-.σ⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; xn--9--etd0100a.xn--4xa887mzpbzz04b; [B3 B5 B6 V3 V6]; [B3 B5 B6 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; xn--9--etd0100a.xn--3xa097mzpbzz04b; [B3 B5 B6 V3 V6]; [B3 B5 B6 V3 V6] # ب≠9-.ς⒍𐹦≠ +B; 򉛴.-ᡢ\u0592𝨠; [P1 V3 V6]; [P1 V3 V6] # .-ᡢ֒𝨠 +B; xn--ep37b.xn----hec165lho83b; [V3 V6]; [V3 V6] # .-ᡢ֒𝨠 +T; \u06CB⒈ß󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ß.- +N; \u06CB⒈ß󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ß.- +T; \u06CB1.ß󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ß.- +N; \u06CB1.ß󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ß.- +B; \u06CB1.SS󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ss.- +B; \u06CB1.ss󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ss.- +B; \u06CB1.Ss󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ss.- +B; xn--1-cwc.ss.xn----q001f; [B6 V3 V6]; [B6 V3 V6] # ۋ1.ss.- +B; xn--1-cwc.xn--zca.xn----q001f; [B6 V3 V6]; [B6 V3 V6] # ۋ1.ß.- +B; \u06CB⒈SS󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- +B; \u06CB⒈ss󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- +B; \u06CB⒈Ss󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- +B; xn--ss-d7d6651a.xn----q001f; [B2 B3 B6 V3 V6]; [B2 B3 B6 V3 V6] # ۋ⒈ss.- +B; xn--zca541ato3a.xn----q001f; [B2 B3 B6 V3 V6]; [B2 B3 B6 V3 V6] # ۋ⒈ß.- +T; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςႦ +N; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςႦ +T; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςႦ +N; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςႦ +T; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςⴆ +N; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςⴆ +T; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σႦ +N; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σႦ +T; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ +N; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ +T; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ +N; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ +B; xn--nu4s.xn--4xa153j7im; [V5 V6]; [V5 V6] # .᮪σⴆ +B; xn--nu4s.xn--4xa153jk8cs1q; [C2 V5 V6]; [C2 V5 V6] # .᮪σⴆ +B; xn--nu4s.xn--4xa217dxri; [V5 V6]; [V5 V6] # .᮪σႦ +B; xn--nu4s.xn--4xa217dxriome; [C2 V5 V6]; [C2 V5 V6] # .᮪σႦ +B; xn--nu4s.xn--3xa353jk8cs1q; [C2 V5 V6]; 
[C2 V5 V6] # .᮪ςⴆ +B; xn--nu4s.xn--3xa417dxriome; [C2 V5 V6]; [C2 V5 V6] # .᮪ςႦ +T; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςⴆ +N; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςⴆ +T; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σႦ +N; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σႦ +T; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ +N; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ +T; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ +N; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ +B; ⾆\u08E2.𝈴; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 舌.𝈴 +B; 舌\u08E2.𝈴; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 舌.𝈴 +B; xn--l0b9413d.xn--kl1h; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 舌.𝈴 +B; ⫞𐹶𖫴。⭠⒈; [B1 P1 V6]; [B1 P1 V6] +B; ⫞𐹶𖫴。⭠1.; [B1]; [B1] +B; xn--53ix188et88b.xn--1-h6r.; [B1]; [B1] +B; xn--53ix188et88b.xn--tsh52w; [B1 V6]; [B1 V6] +T; ⒈\u200C\uAAEC︒.\u0ACD; [C1 P1 V5 V6]; [P1 V5 V6] # ⒈ꫬ︒.્ +N; ⒈\u200C\uAAEC︒.\u0ACD; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⒈ꫬ︒.્ +T; 1.\u200C\uAAEC。.\u0ACD; [C1 V5 A4_2]; [V5 A4_2] # 1.ꫬ..્ +N; 1.\u200C\uAAEC。.\u0ACD; [C1 V5 A4_2]; [C1 V5 A4_2] # 1.ꫬ..્ +B; 1.xn--sv9a..xn--mfc; [V5 A4_2]; [V5 A4_2] # 1.ꫬ..્ +B; 1.xn--0ug7185c..xn--mfc; [C1 V5 A4_2]; [C1 V5 A4_2] # 1.ꫬ..્ +B; xn--tsh0720cse8b.xn--mfc; [V5 V6]; [V5 V6] # ⒈ꫬ︒.્ +B; xn--0ug78o720myr1c.xn--mfc; [C1 V5 V6]; [C1 V5 V6] # ⒈ꫬ︒.્ +B; \u0C46。䰀\u0668𞭅󠅼; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ె.䰀٨ +B; xn--eqc.xn--hib5476aim6t; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ె.䰀٨ +T; ß\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ß.᯲ +N; ß\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ß.᯲ +T; SS\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ +N; SS\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ +T; ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ +N; ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ +T; Ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ +N; Ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ +B; ss.xn--0zf22107b; [V5 V6]; [V5 V6] # ss.᯲ +B; xn--ss-n1t.xn--0zf22107b; [C2 V5 V6]; [C2 V5 V6] # ss.᯲ +B; xn--zca870n.xn--0zf22107b; [C2 V5 V6]; [C2 V5 V6] # ß.᯲ +T; 𑓂\u200C≮.≮; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ +N; 𑓂\u200C≮.≮; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ +T; 𑓂\u200C<\u0338.<\u0338; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ +N; 𑓂\u200C<\u0338.<\u0338; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ +B; xn--gdhz656g.xn--gdh; [V5 V6]; [V5 V6] +B; xn--0ugy6glz29a.xn--gdh; [V5 V6]; [V5 V6] # 𑓂≮.≮ +B; 🕼.\uFFA0; [P1 V6]; [P1 V6] # 🕼. +B; 🕼.\u1160; [P1 V6]; [P1 V6] # 🕼. +B; xn--my8h.xn--psd; [V6]; [V6] # 🕼. +B; xn--my8h.xn--cl7c; [V6]; [V6] # 🕼. +B; ᡔ\uFD82。񷘎; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᡔلحى. +B; ᡔ\u0644\u062D\u0649。񷘎; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᡔلحى. +B; xn--sgb9bq785p.xn--bc31b; [B5 B6 V6]; [B5 B6 V6] # ᡔلحى. 
+B; 爕򳙑.𝟰気; [P1 V6]; [P1 V6] +B; 爕򳙑.4気; [P1 V6]; [P1 V6] +B; xn--1zxq3199c.xn--4-678b; [V6]; [V6] +B; ⒋𑍍Ⴝ-.𞬪\u0DCA\u05B5; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒋𑍍Ⴝ-.්ֵ +B; 4.𑍍Ⴝ-.𞬪\u0DCA\u05B5; [B1 B6 P1 V3 V5 V6]; [B1 B6 P1 V3 V5 V6] # 4.𑍍Ⴝ-.්ֵ +B; 4.𑍍ⴝ-.𞬪\u0DCA\u05B5; [B1 B6 P1 V3 V5 V6]; [B1 B6 P1 V3 V5 V6] # 4.𑍍ⴝ-.්ֵ +B; 4.xn----wwsx259f.xn--ddb152b7y23b; [B1 B6 V3 V5 V6]; [B1 B6 V3 V5 V6] # 4.𑍍ⴝ-.්ֵ +B; 4.xn----t1g9869q.xn--ddb152b7y23b; [B1 B6 V3 V5 V6]; [B1 B6 V3 V5 V6] # 4.𑍍Ⴝ-.්ֵ +B; ⒋𑍍ⴝ-.𞬪\u0DCA\u05B5; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒋𑍍ⴝ-.්ֵ +B; xn----jcp487avl3w.xn--ddb152b7y23b; [B1 V3 V6]; [B1 V3 V6] # ⒋𑍍ⴝ-.්ֵ +B; xn----t1g323mnk9t.xn--ddb152b7y23b; [B1 V3 V6]; [B1 V3 V6] # ⒋𑍍Ⴝ-.්ֵ +B; 󞝃。򑆃񉢗--; [P1 V2 V3 V6]; [P1 V2 V3 V6] +B; xn--2y75e.xn-----1l15eer88n; [V2 V3 V6]; [V2 V3 V6] +T; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 B3 B6 V5] # ߟ.꯭ +N; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ +T; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 B3 B6 V5] # ߟ.꯭ +N; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ +B; xn--6sb.xn--429a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ߟ.꯭ +B; xn--6sb394j.xn--0ug1126c; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ +B; 𞮽\u07FF\u084E。ᢍ򝹁𐫘; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡎ.ᢍ𐫘 +B; 𞮽\u07FF\u084E。ᢍ򝹁𐫘; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡎ.ᢍ𐫘 +B; xn--3tb2nz468k.xn--69e8615j5rn5d; [B5 B6 V6]; [B5 B6 V6] # ࡎ.ᢍ𐫘 +B; \u06ED𞺌𑄚\u1714.ꡞ\u08B7; [B1 B5 B6 V5]; [B1 B5 B6 V5] # ۭم𑄚᜔.ꡞࢷ +B; \u06ED\u0645𑄚\u1714.ꡞ\u08B7; [B1 B5 B6 V5]; [B1 B5 B6 V5] # ۭم𑄚᜔.ꡞࢷ +B; xn--hhb94ag41b739u.xn--dzb5582f; [B1 B5 B6 V5]; [B1 B5 B6 V5] # ۭم𑄚᜔.ꡞࢷ +T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; xn--3sb7483hoyvbbe76g.xn--4xaa21q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.σؼσ +T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +B; xn--3sb7483hoyvbbe76g.xn--3xab31q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.σؼς +B; xn--3sb7483hoyvbbe76g.xn--3xaa51q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.ςؼς +B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +T; 
񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +B; 蔰。󠁹\u08DD-𑈵; [P1 V6]; [P1 V6] # 蔰.ࣝ-𑈵 +B; xn--sz1a.xn----mrd9984r3dl0i; [V6]; [V6] # 蔰.ࣝ-𑈵 +T; ςჅ。\u075A; [P1 V6]; [P1 V6] # ςჅ.ݚ +N; ςჅ。\u075A; [P1 V6]; [P1 V6] # ςჅ.ݚ +T; ςⴥ。\u075A; ςⴥ.\u075A; xn--4xa203s.xn--epb # ςⴥ.ݚ +N; ςⴥ。\u075A; ςⴥ.\u075A; xn--3xa403s.xn--epb # ςⴥ.ݚ +B; ΣჅ。\u075A; [P1 V6]; [P1 V6] # σჅ.ݚ +B; σⴥ。\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ +B; Σⴥ。\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ +B; xn--4xa203s.xn--epb; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ +B; σⴥ.\u075A; ; xn--4xa203s.xn--epb # σⴥ.ݚ +B; ΣჅ.\u075A; [P1 V6]; [P1 V6] # σჅ.ݚ +B; Σⴥ.\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ +B; xn--4xa477d.xn--epb; [V6]; [V6] # σჅ.ݚ +B; xn--3xa403s.xn--epb; ςⴥ.\u075A; xn--3xa403s.xn--epb # ςⴥ.ݚ +T; ςⴥ.\u075A; ; xn--4xa203s.xn--epb # ςⴥ.ݚ +N; ςⴥ.\u075A; ; xn--3xa403s.xn--epb # ςⴥ.ݚ +B; xn--3xa677d.xn--epb; [V6]; [V6] # ςჅ.ݚ +B; \u0C4DႩ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్Ⴉ.᭲ +B; \u0C4DႩ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్Ⴉ.᭲ +B; \u0C4Dⴉ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్ⴉ.᭲ +B; xn--lqc478nlr02a.xn--dwf; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ్ⴉ.᭲ +B; xn--lqc64t7t26c.xn--dwf; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ్Ⴉ.᭲ +B; \u0C4Dⴉ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్ⴉ.᭲ +B; ⮷≮񎈴󠄟。𐠄; [B1 P1 V6]; [B1 P1 V6] +B; ⮷<\u0338񎈴󠄟。𐠄; [B1 P1 V6]; [B1 P1 V6] +B; xn--gdh877a3513h.xn--pc9c; [B1 V6]; [B1 V6] +T; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +B; xn--vkb.xn--08e172a; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; \u06BC.ẏᡤ; ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; \u06BC.y\u0307ᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; \u06BC.Y\u0307ᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; \u06BC.Ẏᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; xn--vkb.xn--08e172ax6aca; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +B; 𐹹𑲛。񑂐\u0DCA; [B1 P1 V6]; [B1 P1 V6] # 𐹹𑲛.් +B; xn--xo0dg5v.xn--h1c39876d; [B1 V6]; [B1 V6] # 𐹹𑲛.් +B; -≠𑈵。嵕\uFEF1۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ +B; -=\u0338𑈵。嵕\uFEF1۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ +B; -≠𑈵。嵕\u064A۴\uA953; [B1 B5 P1 
V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ +B; -=\u0338𑈵。嵕\u064A۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ +B; xn----ufo4749h.xn--mhb45a235sns3c; [B1 B5 V3 V6]; [B1 B5 V3 V6] # -≠𑈵.嵕ي۴꥓ +T; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ +N; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ +T; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ +N; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ +T; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ +N; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ +T; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ +N; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ +B; xn--ypb5875khz9y.xn--0kb682l; [B3 B5 B6 V6]; [B3 B5 B6 V6] # 𐹶ݮ.ہ≯ +B; xn--ypb717jrx2o7v94a.xn--0kb660ka35v; [B1 B3 C1 C2 V6]; [B1 B3 C1 C2 V6] # 𐹶ݮ.ہ≯ +B; ≮.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 +B; <\u0338.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 +B; ≮.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 +B; <\u0338.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 +B; xn--gdh.xn--kwb589e217p; [B1 V5 V6]; [B1 V5 V6] # ≮.ࡕ𐫔 +T; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.ႩႵ +N; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.ႩႵ +T; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.ႩႵ +N; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.ႩႵ +T; 𐩗\u200D。ⴉⴕ; [B3 C2]; xn--pt9c.xn--0kjya # 𐩗.ⴉⴕ +N; 𐩗\u200D。ⴉⴕ; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ +T; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.Ⴉⴕ +N; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.Ⴉⴕ +B; xn--pt9c.xn--hnd666l; [V6]; [V6] +B; xn--1ug4933g.xn--hnd666l; [B3 C2 V6]; [B3 C2 V6] # 𐩗.Ⴉⴕ +B; xn--pt9c.xn--0kjya; 𐩗.ⴉⴕ; xn--pt9c.xn--0kjya; NV8 +B; 𐩗.ⴉⴕ; ; xn--pt9c.xn--0kjya; NV8 +B; 𐩗.ႩႵ; [P1 V6]; [P1 V6] +B; 𐩗.Ⴉⴕ; [P1 V6]; [P1 V6] +B; xn--pt9c.xn--hndy; [V6]; [V6] +B; xn--1ug4933g.xn--0kjya; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ +B; xn--1ug4933g.xn--hndy; [B3 C2 V6]; [B3 C2 V6] # 𐩗.ႩႵ +T; 𐩗\u200D。ⴉⴕ; [B3 C2]; xn--pt9c.xn--0kjya # 𐩗.ⴉⴕ +N; 𐩗\u200D。ⴉⴕ; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ +T; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.Ⴉⴕ +N; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.Ⴉⴕ +T; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [P1 V5 V6] # ㄤ.̮ূ +N; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ㄤ.̮ূ +T; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [P1 V5 V6] # ㄤ.̮ূ +N; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ㄤ.̮ূ +B; xn--1fk.xn--vta284a9o563a; [V5 V6]; [V5 V6] # ㄤ.̮ূ +B; xn--0uga242k.xn--vta284a9o563a; [C1 V5 V6]; [C1 V5 V6] # ㄤ.̮ূ +T; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𐋻.-𐫄Ⴗ +N; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𐋻.-𐫄Ⴗ +T; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𐋻.-𐫄Ⴗ +N; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𐋻.-𐫄Ⴗ +T; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 V3] # 𐋻.-𐫄ⴗ +N; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ +B; xn--v97c.xn----lws0526f; [B1 V3]; [B1 V3] +B; xn--v97c.xn----sgnv20du99s; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ +B; xn--v97c.xn----i1g2513q; [B1 V3 V6]; [B1 V3 V6] +B; xn--v97c.xn----i1g888ih12u; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𐋻.-𐫄Ⴗ +T; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 V3] # 𐋻.-𐫄ⴗ +N; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ +T; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ +N; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ +T; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ +N; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 
V6]; [B1 C1 P1 V6] # 🙑.≠ +T; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ +N; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ +T; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ +N; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ +B; xn--bl0dh970b.xn--1ch; [B1 V6]; [B1 V6] +B; xn--bl0dh970b.xn--0ug83g; [B1 C1 V6]; [B1 C1 V6] # 🙑.≠ +B; \u064C\u1CD2。𞮞\u2D7F⧎; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٌ᳒.⵿⧎ +B; \u064C\u1CD2。𞮞\u2D7F⧎; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٌ᳒.⵿⧎ +B; xn--ohb646i.xn--ewi38jf765c; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ٌ᳒.⵿⧎ +B; Ⴔ𝨨₃󠁦.𝟳𑂹\u0B82; [P1 V6]; [P1 V6] # Ⴔ𝨨3.7𑂹ஂ +B; Ⴔ𝨨3󠁦.7𑂹\u0B82; [P1 V6]; [P1 V6] # Ⴔ𝨨3.7𑂹ஂ +B; ⴔ𝨨3󠁦.7𑂹\u0B82; [P1 V6]; [P1 V6] # ⴔ𝨨3.7𑂹ஂ +B; xn--3-ews6985n35s3g.xn--7-cve6271r; [V6]; [V6] # ⴔ𝨨3.7𑂹ஂ +B; xn--3-b1g83426a35t0g.xn--7-cve6271r; [V6]; [V6] # Ⴔ𝨨3.7𑂹ஂ +B; ⴔ𝨨₃󠁦.𝟳𑂹\u0B82; [P1 V6]; [P1 V6] # ⴔ𝨨3.7𑂹ஂ +T; 䏈\u200C。\u200C⒈񱢕; [C1 P1 V6]; [P1 V6] # 䏈.⒈ +N; 䏈\u200C。\u200C⒈񱢕; [C1 P1 V6]; [C1 P1 V6] # 䏈.⒈ +T; 䏈\u200C。\u200C1.񱢕; [C1 P1 V6]; [P1 V6] # 䏈.1. +N; 䏈\u200C。\u200C1.񱢕; [C1 P1 V6]; [C1 P1 V6] # 䏈.1. +B; xn--eco.1.xn--ms39a; [V6]; [V6] +B; xn--0ug491l.xn--1-rgn.xn--ms39a; [C1 V6]; [C1 V6] # 䏈.1. +B; xn--eco.xn--tsh21126d; [V6]; [V6] +B; xn--0ug491l.xn--0ug88oot66q; [C1 V6]; [C1 V6] # 䏈.⒈ +T; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ +N; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ +T; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ +N; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ +B; 1\uAAF6SS𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; 1\uAAF6ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; 1\uAAF6Ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; xn--1ss-ir6ln166b.xn--weg; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; xn--1-qfa2471kdb0d.xn--weg; [V5]; [V5] # 1꫶ß𑲥.ᷘ +B; 1\uAAF6SS𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; 1\uAAF6ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; 1\uAAF6Ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +T; \u200D񫶩𞪯\u0CCD。\u077C⒈; [B1 C2 P1 V6]; [B5 B6 P1 V6] # ್.ݼ⒈ +N; \u200D񫶩𞪯\u0CCD。\u077C⒈; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ್.ݼ⒈ +T; \u200D񫶩𞪯\u0CCD。\u077C1.; [B1 C2 P1 V6]; [B5 B6 P1 V6] # ್.ݼ1. +N; \u200D񫶩𞪯\u0CCD。\u077C1.; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ್.ݼ1. +B; xn--8tc9875v5is1a.xn--1-g6c.; [B5 B6 V6]; [B5 B6 V6] # ್.ݼ1. +B; xn--8tc969gzn94a4lm8a.xn--1-g6c.; [B1 C2 V6]; [B1 C2 V6] # ್.ݼ1. 
+B; xn--8tc9875v5is1a.xn--dqb689l; [B5 B6 V6]; [B5 B6 V6] # ್.ݼ⒈ +B; xn--8tc969gzn94a4lm8a.xn--dqb689l; [B1 C2 V6]; [B1 C2 V6] # ್.ݼ⒈ +B; \u1AB6.𞤳򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ +B; \u1AB6.𞤳򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ +B; \u1AB6.𞤑򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ +B; xn--zqf.xn--ysb9657vuiz5bj0ep; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ᪶.𞤳ߗ +B; \u1AB6.𞤑򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ +B; \u0842𞩚⒈.󠬌8򏳏\u0770; [B1 P1 V6]; [B1 P1 V6] # ࡂ⒈.8ݰ +B; \u0842𞩚1..󠬌8򏳏\u0770; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡂ1..8ݰ +B; xn--1-rid26318a..xn--8-s5c22427ox454a; [B1 V6 A4_2]; [B1 V6 A4_2] # ࡂ1..8ݰ +B; xn--0vb095ldg52a.xn--8-s5c22427ox454a; [B1 V6]; [B1 V6] # ࡂ⒈.8ݰ +B; \u0361𐫫\u0369ᡷ。-󠰛鞰; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ͡𐫫ͩᡷ.-鞰 +B; xn--cvaq482npv5t.xn----yg7dt1332g; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ͡𐫫ͩᡷ.-鞰 +T; -.\u0ACD剘ß𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 +N; -.\u0ACD剘ß𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 +B; -.\u0ACD剘SS𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 +B; -.\u0ACD剘ss𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 +B; -.\u0ACD剘Ss𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 +B; -.xn--ss-bqg4734erywk; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 +B; -.xn--zca791c493duf8i; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 +B; \u08FB𞵸。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ࣻ.- +B; \u08FB𞵸。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ࣻ.- +B; xn--b1b2719v.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ࣻ.- +B; ⒈󠈻𐹲。≠\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # ⒈𐹲.≠𐹽 +B; ⒈󠈻𐹲。=\u0338\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # ⒈𐹲.≠𐹽 +B; 1.󠈻𐹲。≠\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # 1.𐹲.≠𐹽 +B; 1.󠈻𐹲。=\u0338\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # 1.𐹲.≠𐹽 +B; 1.xn--qo0dl3077c.xn--lfb536lb35n; [B1 V6]; [B1 V6] # 1.𐹲.≠𐹽 +B; xn--tshw766f1153g.xn--lfb536lb35n; [B1 V6]; [B1 V6] # ⒈𐹲.≠𐹽 +T; 𐹢󠈚Ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹢Ⴎ.㖾𐹡 +N; 𐹢󠈚Ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹢Ⴎ.㖾𐹡 +T; 𐹢󠈚ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹢ⴎ.㖾𐹡 +N; 𐹢󠈚ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹢ⴎ.㖾𐹡 +B; xn--5kjx323em053g.xn--pelu572d; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; xn--0ug342clq0pqxv4i.xn--pelu572d; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹢ⴎ.㖾𐹡 +B; xn--mnd9001km0o0g.xn--pelu572d; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; xn--mnd289ezj4pqxp0i.xn--pelu572d; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹢Ⴎ.㖾𐹡 +B; 򩼗.\u07C7ᡖႳႧ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳႧ +B; 򩼗.\u07C7ᡖႳႧ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳႧ +B; 򩼗.\u07C7ᡖⴓⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖⴓⴇ +B; 򩼗.\u07C7ᡖႳⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳⴇ +B; xn--te28c.xn--isb286btrgo7w; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖႳⴇ +B; xn--te28c.xn--isb295fbtpmb; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖⴓⴇ +B; xn--te28c.xn--isb856b9a631d; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖႳႧ +B; 򩼗.\u07C7ᡖⴓⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖⴓⴇ +B; 򩼗.\u07C7ᡖႳⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳⴇ +T; \u200D􅍉.\u06B3\u0775; [B1 C2 P1 V6]; [P1 V6] # .ڳݵ +N; \u200D􅍉.\u06B3\u0775; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .ڳݵ +B; xn--3j78f.xn--mkb20b; [V6]; [V6] # .ڳݵ +B; xn--1ug39444n.xn--mkb20b; [B1 C2 V6]; [B1 C2 V6] # .ڳݵ +B; 𲤱⒛⾳.ꡦ⒈; [P1 V6]; [P1 V6] +B; 𲤱20.音.ꡦ1.; [P1 V6]; [P1 V6] +B; xn--20-9802c.xn--0w5a.xn--1-eg4e.; [V6]; [V6] +B; xn--dth6033bzbvx.xn--tsh9439b; [V6]; [V6] +B; \u07DC8񳦓-。򞲙𑁿𐩥\u09CD; [B2 B3 B5 B6 P1 V3 V6]; [B2 B3 B5 B6 P1 V3 V6] # ߜ8-.𑁿𐩥্ +B; \u07DC8񳦓-。򞲙𑁿𐩥\u09CD; [B2 B3 B5 B6 P1 V3 V6]; [B2 B3 B5 B6 P1 V3 V6] # ߜ8-.𑁿𐩥্ +B; xn--8--rve13079p.xn--b7b9842k42df776x; [B2 B3 B5 B6 V3 V6]; [B2 B3 B5 B6 V3 V6] # ߜ8-.𑁿𐩥্ +T; Ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # 
Ⴕ.۰≮ß݅ +N; Ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ +T; Ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ +N; Ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ +T; ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ +N; ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ +T; ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ +N; ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ +B; Ⴕ。۰≮SS\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ +B; Ⴕ。۰<\u0338SS\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ +B; ⴕ。۰<\u0338ss\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ss݅ +B; ⴕ。۰≮ss\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ss݅ +B; Ⴕ。۰≮Ss\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ +B; Ⴕ。۰<\u0338Ss\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ +B; xn--tnd.xn--ss-jbe65aw27i; [V6]; [V6] # Ⴕ.۰≮ss݅ +B; xn--dlj.xn--ss-jbe65aw27i; [V6]; [V6] # ⴕ.۰≮ss݅ +B; xn--dlj.xn--zca912alh227g; [V6]; [V6] # ⴕ.۰≮ß݅ +B; xn--tnd.xn--zca912alh227g; [V6]; [V6] # Ⴕ.۰≮ß݅ +B; \u07E9-.𝨗꒱\u1B72; [B1 B3 V3 V5]; [B1 B3 V3 V5] # ߩ-.𝨗꒱᭲ +B; xn----odd.xn--dwf8994dc8wj; [B1 B3 V3 V5]; [B1 B3 V3 V5] # ߩ-.𝨗꒱᭲ +T; 𞼸\u200C.≯䕵⫧; [B1 B3 C1 P1 V6]; [B1 P1 V6] # .≯䕵⫧ +N; 𞼸\u200C.≯䕵⫧; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # .≯䕵⫧ +T; 𞼸\u200C.>\u0338䕵⫧; [B1 B3 C1 P1 V6]; [B1 P1 V6] # .≯䕵⫧ +N; 𞼸\u200C.>\u0338䕵⫧; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # .≯䕵⫧ +B; xn--sn7h.xn--hdh754ax6w; [B1 V6]; [B1 V6] +B; xn--0ugx453p.xn--hdh754ax6w; [B1 B3 C1 V6]; [B1 B3 C1 V6] # .≯䕵⫧ +T; 𐨅ß\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ßيخ.ڬ۳︒ +N; 𐨅ß\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ßيخ.ڬ۳︒ +T; 𐨅ß\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. +N; 𐨅ß\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. +B; 𐨅SS\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. +B; 𐨅ss\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. +B; 𐨅Ss\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. +B; xn--ss-ytd5i7765l.xn--fkb6l.; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. +B; xn--zca23yncs877j.xn--fkb6l.; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. 
+B; 𐨅SS\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ +B; 𐨅ss\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ +B; 𐨅Ss\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ +B; xn--ss-ytd5i7765l.xn--fkb6lp314e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 𐨅ssيخ.ڬ۳︒ +B; xn--zca23yncs877j.xn--fkb6lp314e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 𐨅ßيخ.ڬ۳︒ +B; -≮🡒\u1CED.񏿾Ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.Ⴁܔ +B; -<\u0338🡒\u1CED.񏿾Ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.Ⴁܔ +B; -<\u0338🡒\u1CED.񏿾ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.ⴁܔ +B; -≮🡒\u1CED.񏿾ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.ⴁܔ +B; xn----44l04zxt68c.xn--enb135qf106f; [B1 V3 V6]; [B1 V3 V6] # -≮🡒᳭.ⴁܔ +B; xn----44l04zxt68c.xn--enb300c1597h; [B1 V3 V6]; [B1 V3 V6] # -≮🡒᳭.Ⴁܔ +T; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ +N; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +T; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ +N; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +T; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ +N; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +B; xn--ge6h.xn--oc9a; 𞤨.ꡏ; xn--ge6h.xn--oc9a +B; 𞤨.ꡏ; ; xn--ge6h.xn--oc9a +B; 𞤆.ꡏ; 𞤨.ꡏ; xn--ge6h.xn--oc9a +B; xn--ge6h.xn--0ugb9575h; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +T; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ +N; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +B; 󠅹𑂶.ᢌ𑂹\u0669; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ +B; 󠅹𑂶.ᢌ𑂹\u0669; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ +B; xn--b50d.xn--iib993gyp5p; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ +B; Ⅎ󠅺񝵒。≯⾑; [P1 V6]; [P1 V6] +B; Ⅎ󠅺񝵒。>\u0338⾑; [P1 V6]; [P1 V6] +B; Ⅎ󠅺񝵒。≯襾; [P1 V6]; [P1 V6] +B; Ⅎ󠅺񝵒。>\u0338襾; [P1 V6]; [P1 V6] +B; ⅎ󠅺񝵒。>\u0338襾; [P1 V6]; [P1 V6] +B; ⅎ󠅺񝵒。≯襾; [P1 V6]; [P1 V6] +B; xn--73g39298c.xn--hdhz171b; [V6]; [V6] +B; xn--f3g73398c.xn--hdhz171b; [V6]; [V6] +B; ⅎ󠅺񝵒。>\u0338⾑; [P1 V6]; [P1 V6] +B; ⅎ󠅺񝵒。≯⾑; [P1 V6]; [P1 V6] +T; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # ςු٠.- +N; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- +T; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # ςු٠.- +N; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- +T; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- +N; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +T; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- +N; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +B; xn--4xa25ks2j.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # σු٠.- +B; xn--4xa25ks2jenu.-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +B; xn--3xa45ks2jenu.-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- +T; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- +N; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +T; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- +N; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +T; \u200C.ßႩ-; [C1 P1 V3 V6]; [P1 V3 V6 A4_2] # .ßႩ- +N; \u200C.ßႩ-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .ßႩ- +T; \u200C.ßⴉ-; [C1 V3]; [V3 A4_2] # .ßⴉ- +N; \u200C.ßⴉ-; [C1 V3]; [C1 V3] # .ßⴉ- +T; \u200C.SSႩ-; [C1 P1 V3 V6]; [P1 V3 V6 A4_2] # .ssႩ- +N; \u200C.SSႩ-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .ssႩ- +T; \u200C.ssⴉ-; [C1 V3]; [V3 A4_2] # .ssⴉ- +N; \u200C.ssⴉ-; [C1 V3]; [C1 V3] # .ssⴉ- +T; \u200C.Ssⴉ-; [C1 V3]; [V3 A4_2] # .ssⴉ- +N; \u200C.Ssⴉ-; [C1 V3]; [C1 V3] # .ssⴉ- +B; .xn--ss--bi1b; [V3 A4_2]; [V3 A4_2] +B; xn--0ug.xn--ss--bi1b; [C1 V3]; [C1 V3] # .ssⴉ- +B; .xn--ss--4rn; [V3 V6 A4_2]; [V3 V6 A4_2] +B; xn--0ug.xn--ss--4rn; 
[C1 V3 V6]; [C1 V3 V6] # .ssႩ- +B; xn--0ug.xn----pfa2305a; [C1 V3]; [C1 V3] # .ßⴉ- +B; xn--0ug.xn----pfa042j; [C1 V3 V6]; [C1 V3 V6] # .ßႩ- +B; 󍭲𐫍㓱。⾑; [B5 P1 V6]; [B5 P1 V6] +B; 󍭲𐫍㓱。襾; [B5 P1 V6]; [B5 P1 V6] +B; xn--u7kt691dlj09f.xn--9v2a; [B5 V6]; [B5 V6] +T; \u06A0𐮋𐹰≮。≯󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 P1 V6] # ڠ𐮋𐹰≮.≯ +N; \u06A0𐮋𐹰≮。≯󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 C2 P1 V6] # ڠ𐮋𐹰≮.≯ +T; \u06A0𐮋𐹰<\u0338。>\u0338󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 P1 V6] # ڠ𐮋𐹰≮.≯ +N; \u06A0𐮋𐹰<\u0338。>\u0338󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 C2 P1 V6] # ڠ𐮋𐹰≮.≯ +B; xn--2jb053lf13nyoc.xn--hdh08821l; [B1 B3 V6]; [B1 B3 V6] # ڠ𐮋𐹰≮.≯ +B; xn--2jb053lf13nyoc.xn--1ugx6gc8096c; [B1 B3 C2 V6]; [B1 B3 C2 V6] # ڠ𐮋𐹰≮.≯ +B; 𝟞。񃰶\u0777\u08B0⩋; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 6.ݷࢰ⩋ +B; 6。񃰶\u0777\u08B0⩋; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 6.ݷࢰ⩋ +B; 6.xn--7pb04do15eq748f; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 6.ݷࢰ⩋ +B; -\uFCFD。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 +B; -\uFCFD。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 +B; -\u0634\u0649。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 +B; xn----qnc7d.xn--wd1d62a; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 +T; \u200C󠊶𝟏.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 1.ൃ𐹬 +N; \u200C󠊶𝟏.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.ൃ𐹬 +T; \u200C󠊶1.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 1.ൃ𐹬 +N; \u200C󠊶1.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.ൃ𐹬 +B; xn--1-f521m.xn--mxc0872kcu37dnmem; [B1 V5 V6]; [B1 V5 V6] # 1.ൃ𐹬 +B; xn--1-rgnu0071n.xn--mxc0872kcu37dnmem; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 1.ൃ𐹬 +T; 齙--𝟰.ß; 齙--4.ß; xn----4-p16k.ss +N; 齙--𝟰.ß; 齙--4.ß; xn----4-p16k.xn--zca +T; 齙--4.ß; ; xn----4-p16k.ss +N; 齙--4.ß; ; xn----4-p16k.xn--zca +B; 齙--4.SS; 齙--4.ss; xn----4-p16k.ss +B; 齙--4.ss; ; xn----4-p16k.ss +B; 齙--4.Ss; 齙--4.ss; xn----4-p16k.ss +B; xn----4-p16k.ss; 齙--4.ss; xn----4-p16k.ss +B; xn----4-p16k.xn--zca; 齙--4.ß; xn----4-p16k.xn--zca +B; 齙--𝟰.SS; 齙--4.ss; xn----4-p16k.ss +B; 齙--𝟰.ss; 齙--4.ss; xn----4-p16k.ss +B; 齙--𝟰.Ss; 齙--4.ss; xn----4-p16k.ss +T; \u1BF2.𐹢𞀖\u200C; [B1 C1 V5]; [B1 V5] # ᯲.𐹢𞀖 +N; \u1BF2.𐹢𞀖\u200C; [B1 C1 V5]; [B1 C1 V5] # ᯲.𐹢𞀖 +B; xn--0zf.xn--9n0d2296a; [B1 V5]; [B1 V5] # ᯲.𐹢𞀖 +B; xn--0zf.xn--0ug9894grqqf; [B1 C1 V5]; [B1 C1 V5] # ᯲.𐹢𞀖 +T; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +T; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +B; xn--ct86d8w51a.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- +B; XN--CT86D8W51A.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- +B; Xn--Ct86d8w51a.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- +T; xn--ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; xn--ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +T; XN--CT86D8W51A.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; XN--CT86D8W51A.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +T; Xn--Ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; Xn--Ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +B; \u1A60.𞵷-𝪩悎; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᩠.-𝪩悎 +B; \u1A60.𞵷-𝪩悎; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᩠.-𝪩悎 +B; xn--jof.xn----gf4bq282iezpa; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ᩠.-𝪩悎 +B; 𛜯󠊛.𞤳񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] +B; 𛜯󠊛.𞤳񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] +B; 𛜯󠊛.𞤑񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] +B; xn--xx5gy2741c.xn--re6hw266j; [B2 B3 B6 V6]; [B2 B3 B6 V6] +B; 𛜯󠊛.𞤑񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] +B; \u071C𐫒\u062E.𐋲; [B1]; [B1] # ܜ𐫒خ.𐋲 +B; xn--tgb98b8643d.xn--m97c; [B1]; [B1] # ܜ𐫒خ.𐋲 +B; 𐼑𞤓\u0637\u08E2.\uDF56; [P1 
V6]; [P1 V6 A3] # 𞤵ط. +B; 𐼑𞤵\u0637\u08E2.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. +B; xn--2gb08k9w69agm0g.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. +B; XN--2GB08K9W69AGM0G.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. +B; Xn--2Gb08k9w69agm0g.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. +B; Ↄ。\u0A4D\u1CD4𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # Ↄ.᳔੍ +B; Ↄ。\u1CD4\u0A4D𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # Ↄ.᳔੍ +B; ↄ。\u1CD4\u0A4D𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ↄ.᳔੍ +B; xn--r5g.xn--ybc995g0835a; [B1 V5 V6]; [B1 V5 V6] # ↄ.᳔੍ +B; xn--q5g.xn--ybc995g0835a; [B1 V5 V6]; [B1 V5 V6] # Ↄ.᳔੍ +B; ↄ。\u0A4D\u1CD4𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ↄ.᳔੍ +B; 󠪢-。򛂏≮𑜫; [P1 V3 V6]; [P1 V3 V6] +B; 󠪢-。򛂏<\u0338𑜫; [P1 V3 V6]; [P1 V3 V6] +B; xn----bh61m.xn--gdhz157g0em1d; [V3 V6]; [V3 V6] +T; \u200C󠉹\u200D。򌿧≮Ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮Ⴉ +N; \u200C󠉹\u200D。򌿧≮Ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮Ⴉ +T; \u200C󠉹\u200D。򌿧<\u0338Ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮Ⴉ +N; \u200C󠉹\u200D。򌿧<\u0338Ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮Ⴉ +T; \u200C󠉹\u200D。򌿧<\u0338ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮ⴉ +N; \u200C󠉹\u200D。򌿧<\u0338ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮ⴉ +T; \u200C󠉹\u200D。򌿧≮ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮ⴉ +N; \u200C󠉹\u200D。򌿧≮ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮ⴉ +B; xn--3n36e.xn--gdh992byu01p; [V6]; [V6] +B; xn--0ugc90904y.xn--gdh992byu01p; [C1 C2 V6]; [C1 C2 V6] # .≮ⴉ +B; xn--3n36e.xn--hnd112gpz83n; [V6]; [V6] +B; xn--0ugc90904y.xn--hnd112gpz83n; [C1 C2 V6]; [C1 C2 V6] # .≮Ⴉ +B; 𐹯-𑄴\u08BC。︒䖐⾆; [B1 P1 V6]; [B1 P1 V6] # 𐹯-𑄴ࢼ.︒䖐舌 +B; 𐹯-𑄴\u08BC。。䖐舌; [B1 A4_2]; [B1 A4_2] # 𐹯-𑄴ࢼ..䖐舌 +B; xn----rpd7902rclc..xn--fpo216m; [B1 A4_2]; [B1 A4_2] # 𐹯-𑄴ࢼ..䖐舌 +B; xn----rpd7902rclc.xn--fpo216mn07e; [B1 V6]; [B1 V6] # 𐹯-𑄴ࢼ.︒䖐舌 +B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] +B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] +B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] +B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] +B; 𝪞ⴐ。쪡; [V5]; [V5] +B; 𝪞ⴐ。쪡; [V5]; [V5] +B; xn--7kj1858k.xn--pi6b; [V5]; [V5] +B; xn--ond3755u.xn--pi6b; [V5 V6]; [V5 V6] +B; 𝪞ⴐ。쪡; [V5]; [V5] +B; 𝪞ⴐ。쪡; [V5]; [V5] +B; \u0E3A쩁𐹬.􋉳; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ฺ쩁𐹬. +B; \u0E3A쩁𐹬.􋉳; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ฺ쩁𐹬. +B; xn--o4c4837g2zvb.xn--5f70g; [B1 V5 V6]; [B1 V5 V6] # ฺ쩁𐹬. 
+T; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [P1 V6] # ᡅ0.⎢ +N; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [C1 P1 V6] # ᡅ0.⎢ +T; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [P1 V6] # ᡅ0.⎢ +N; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [C1 P1 V6] # ᡅ0.⎢ +B; xn--0-z6j.xn--8lh28773l; [V6]; [V6] +B; xn--0-z6jy93b.xn--8lh28773l; [C1 V6]; [C1 V6] # ᡅ0.⎢ +T; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ß +N; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ß +T; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ß +N; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ß +T; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +T; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +T; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +B; xn--9-i0j5967eg3qz.ss; [V6]; [V6] # 9ꍩ៓.ss +B; xn--9-i0j5967eg3qz.xn--ss-l1t; [C2 V6]; [C2 V6] # 9ꍩ៓.ss +B; xn--9-i0j5967eg3qz.xn--zca770n; [C2 V6]; [C2 V6] # 9ꍩ៓.ß +T; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +T; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +T; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +B; ꗷ𑆀.\u075D𐩒; ; xn--ju8a625r.xn--hpb0073k; NV8 # ꗷ𑆀.ݝ𐩒 +B; xn--ju8a625r.xn--hpb0073k; ꗷ𑆀.\u075D𐩒; xn--ju8a625r.xn--hpb0073k; NV8 # ꗷ𑆀.ݝ𐩒 +B; ⒐≯-。︒򩑣-񞛠; [P1 V3 V6]; [P1 V3 V6] +B; ⒐>\u0338-。︒򩑣-񞛠; [P1 V3 V6]; [P1 V3 V6] +B; 9.≯-。。򩑣-񞛠; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] +B; 9.>\u0338-。。򩑣-񞛠; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] +B; 9.xn----ogo..xn----xj54d1s69k; [V3 V6 A4_2]; [V3 V6 A4_2] +B; xn----ogot9g.xn----n89hl0522az9u2a; [V3 V6]; [V3 V6] +B; 򈪚\u0CE3Ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣჁ. +B; 򈪚\u0CE3Ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣჁ. +B; 򈪚\u0CE3ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣⴡ. +B; xn--vuc226n8n28lmju7a.xn--cgb; [B6 V6]; [B6 V6] # ೣⴡ. +B; xn--vuc49qvu85xmju7a.xn--cgb; [B6 V6]; [B6 V6] # ೣჁ. +B; 򈪚\u0CE3ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣⴡ. 
+B; \u1DEB。𐋩\u0638-𐫮; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᷫ.𐋩ظ-𐫮 +B; xn--gfg.xn----xnc0815qyyg; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᷫ.𐋩ظ-𐫮 +B; 싇。⾇𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。⾇𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐳋ⴝ; [B5]; [B5] +B; 싇。舛𐳋ⴝ; [B5]; [B5] +B; 싇。舛𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐲋ⴝ; [B5]; [B5] +B; 싇。舛𐲋ⴝ; [B5]; [B5] +B; xn--9u4b.xn--llj123yh74e; [B5]; [B5] +B; xn--9u4b.xn--1nd7519ch79d; [B5 V6]; [B5 V6] +B; 싇。⾇𐳋ⴝ; [B5]; [B5] +B; 싇。⾇𐳋ⴝ; [B5]; [B5] +B; 싇。⾇𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。⾇𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。⾇𐲋ⴝ; [B5]; [B5] +B; 싇。⾇𐲋ⴝ; [B5]; [B5] +T; 𐹠ς。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠ς.ڿჀ +N; 𐹠ς。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹠ς.ڿჀ +T; 𐹠ς。\u200C\u06BFⴠ; [B1 C1]; [B1 B2 B3] # 𐹠ς.ڿⴠ +N; 𐹠ς。\u200C\u06BFⴠ; [B1 C1]; [B1 C1] # 𐹠ς.ڿⴠ +T; 𐹠Σ。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠σ.ڿჀ +N; 𐹠Σ。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹠σ.ڿჀ +T; 𐹠σ。\u200C\u06BFⴠ; [B1 C1]; [B1 B2 B3] # 𐹠σ.ڿⴠ +N; 𐹠σ。\u200C\u06BFⴠ; [B1 C1]; [B1 C1] # 𐹠σ.ڿⴠ +B; xn--4xa9167k.xn--ykb467q; [B1 B2 B3]; [B1 B2 B3] # 𐹠σ.ڿⴠ +B; xn--4xa9167k.xn--ykb760k9hj; [B1 C1]; [B1 C1] # 𐹠σ.ڿⴠ +B; xn--4xa9167k.xn--ykb632c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠σ.ڿჀ +B; xn--4xa9167k.xn--ykb632cvxm; [B1 C1 V6]; [B1 C1 V6] # 𐹠σ.ڿჀ +B; xn--3xa1267k.xn--ykb760k9hj; [B1 C1]; [B1 C1] # 𐹠ς.ڿⴠ +B; xn--3xa1267k.xn--ykb632cvxm; [B1 C1 V6]; [B1 C1 V6] # 𐹠ς.ڿჀ +T; 򇒐\u200C\u0604.\u069A-ß; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ß +N; 򇒐\u200C\u0604.\u069A-ß; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ß +T; 򇒐\u200C\u0604.\u069A-SS; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss +N; 򇒐\u200C\u0604.\u069A-SS; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss +T; 򇒐\u200C\u0604.\u069A-ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss +N; 򇒐\u200C\u0604.\u069A-ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss +T; 򇒐\u200C\u0604.\u069A-Ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss +N; 򇒐\u200C\u0604.\u069A-Ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss +B; xn--mfb98261i.xn---ss-sdf; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # .ښ-ss +B; xn--mfb144kqo32m.xn---ss-sdf; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # .ښ-ss +B; xn--mfb144kqo32m.xn----qfa315b; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # .ښ-ß +T; \u200C\u200D\u17B5\u067A.-\uFBB0󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ +N; \u200C\u200D\u17B5\u067A.-\uFBB0󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ +T; \u200C\u200D\u17B5\u067A.-\u06D3󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ +N; \u200C\u200D\u17B5\u067A.-\u06D3󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ +T; \u200C\u200D\u17B5\u067A.-\u06D2\u0654󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ +N; \u200C\u200D\u17B5\u067A.-\u06D2\u0654󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ +B; xn--zib539f.xn----twc1133r17r6g; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٺ.-ۓ +B; xn--zib539f8igea.xn----twc1133r17r6g; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # ٺ.-ۓ +B; 򡶱。𐮬≠; [B3 P1 V6]; [B3 P1 V6] +B; 򡶱。𐮬=\u0338; [B3 P1 V6]; [B3 P1 V6] +B; 򡶱。𐮬≠; [B3 P1 V6]; [B3 P1 V6] +B; 򡶱。𐮬=\u0338; [B3 P1 V6]; [B3 P1 V6] +B; xn--dd55c.xn--1ch3003g; [B3 V6]; [B3 V6] +B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ +B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ +B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ +B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ +B; xn--fgd0675v.xn--imb5839fidpcbba; [B1 V5 V6]; [B1 V5 V6] # ྲ.𐹮𐹷덝۵ +T; 
Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [P1 V3 V6] # Ⴏ-.Ⴉ +N; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [C2 P1 V3 V6] # Ⴏ-.Ⴉ +T; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [P1 V3 V6] # Ⴏ-.Ⴉ +N; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [C2 P1 V3 V6] # Ⴏ-.Ⴉ +T; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [V3] # ⴏ-.ⴉ +N; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [C2 V3] # ⴏ-.ⴉ +B; xn----3vs.xn--0kj; [V3]; [V3] +B; xn----3vs.xn--1ug532c; [C2 V3]; [C2 V3] # ⴏ-.ⴉ +B; xn----00g.xn--hnd; [V3 V6]; [V3 V6] +B; xn----00g.xn--hnd399e; [C2 V3 V6]; [C2 V3 V6] # Ⴏ-.Ⴉ +T; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [V3] # ⴏ-.ⴉ +N; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [C2 V3] # ⴏ-.ⴉ +B; ⇧𐨏󠾈󯶅。\u0600󠈵󠆉; [B1 P1 V6]; [B1 P1 V6] # ⇧𐨏. +B; xn--l8g5552g64t4g46xf.xn--ifb08144p; [B1 V6]; [B1 V6] # ⇧𐨏. +B; ≠𐮂.↑🄇⒈; [B1 P1 V6]; [B1 P1 V6] +B; =\u0338𐮂.↑🄇⒈; [B1 P1 V6]; [B1 P1 V6] +B; ≠𐮂.↑6,1.; [B1 P1 V6]; [B1 P1 V6] +B; =\u0338𐮂.↑6,1.; [B1 P1 V6]; [B1 P1 V6] +B; xn--1chy492g.xn--6,1-pw1a.; [B1 P1 V6]; [B1 P1 V6] +B; xn--1chy492g.xn--45gx9iuy44d; [B1 V6]; [B1 V6] +T; 𝩏󠲉ß.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ß.ᢤ𐹫 +N; 𝩏󠲉ß.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ß.ᢤ𐹫 +T; 𝩏󠲉SS.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +N; 𝩏󠲉SS.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +T; 𝩏󠲉ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +N; 𝩏󠲉ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +T; 𝩏󠲉Ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +N; 𝩏󠲉Ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +B; xn--ss-zb11ap1427e.xn--ubf2596jbt61c; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--ss-zb11ap1427e.xn--ubf609atw1tynn3d; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # 𝩏ss.ᢤ𐹫 +B; xn--zca3153vupz3e.xn--ubf609atw1tynn3d; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # 𝩏ß.ᢤ𐹫 +T; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ +N; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ +T; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ +N; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ +T; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ +N; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ +B; SS𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ +B; ss𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssⴇ.ꙺ +B; Ss𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ +B; xn--ss-rek7420r4hs7b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ssႧ.ꙺ +B; xn--ss-e61ar955h4hs7b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ssⴇ.ꙺ +B; xn--zca227tpy4lkns1b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ßⴇ.ꙺ +B; xn--zca491fci5qkn79a.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ßႧ.ꙺ +T; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ +N; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ +B; SS𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ +B; ss𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssⴇ.ꙺ +B; Ss𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ +B; \u1714。󠆣-𑋪; [V3 V5]; [V3 V5] # ᜔.-𑋪 +B; xn--fze.xn----ly8i; [V3 V5]; [V3 V5] # ᜔.-𑋪 +T; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß +N; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß +T; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß +N; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß +B; \uABE8-.򨏜\u05BDSS; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; \uABE8-.򨏜\u05BDss; [P1 V3 V5 V6]; [P1 V3 V5 V6] # 
ꯨ-.ֽss +B; \uABE8-.򨏜\u05BDSs; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; xn----pw5e.xn--ss-7jd10716y; [V3 V5 V6]; [V3 V5 V6] # ꯨ-.ֽss +B; xn----pw5e.xn--zca50wfv060a; [V3 V5 V6]; [V3 V5 V6] # ꯨ-.ֽß +B; \uABE8-.򨏜\u05BDSS; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; \uABE8-.򨏜\u05BDss; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; \uABE8-.򨏜\u05BDSs; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; ᡓ-≮。\u066B󠅱ᡄ; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡓ-≮.٫ᡄ +B; ᡓ-<\u0338。\u066B󠅱ᡄ; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡓ-≮.٫ᡄ +B; xn----s7j866c.xn--kib252g; [B1 B6 V6]; [B1 B6 V6] # ᡓ-≮.٫ᡄ +B; 𝟥♮𑜫\u08ED.\u17D2𑜫8󠆏; [V5]; [V5] # 3♮𑜫࣭.្𑜫8 +B; 3♮𑜫\u08ED.\u17D2𑜫8󠆏; [V5]; [V5] # 3♮𑜫࣭.្𑜫8 +B; xn--3-ksd277tlo7s.xn--8-f0jx021l; [V5]; [V5] # 3♮𑜫࣭.្𑜫8 +T; -。򕌀\u200D❡; [C2 P1 V3 V6]; [P1 V3 V6] # -.❡ +N; -。򕌀\u200D❡; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.❡ +T; -。򕌀\u200D❡; [C2 P1 V3 V6]; [P1 V3 V6] # -.❡ +N; -。򕌀\u200D❡; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.❡ +B; -.xn--nei54421f; [V3 V6]; [V3 V6] +B; -.xn--1ug800aq795s; [C2 V3 V6]; [C2 V3 V6] # -.❡ +B; 𝟓☱𝟐򥰵。𝪮񐡳; [P1 V5 V6]; [P1 V5 V6] +B; 5☱2򥰵。𝪮񐡳; [P1 V5 V6]; [P1 V5 V6] +B; xn--52-dwx47758j.xn--kd3hk431k; [V5 V6]; [V5 V6] +B; -.-├򖦣; [P1 V3 V6]; [P1 V3 V6] +B; -.xn----ukp70432h; [V3 V6]; [V3 V6] +T; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ֥ݭ. +N; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ֥ݭ. +T; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ֥ݭ. +N; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ֥ݭ. +B; xn--wcb62g.xn--p526e; [B1 V5 V6]; [B1 V5 V6] # ֥ݭ. +B; xn--wcb62g.xn--1ugy8001l; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ֥ݭ. +T; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 +N; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 +T; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 +N; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 +T; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.1.1.𐫒 +N; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.1.1.𐫒 +T; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.1.1.𐫒 +N; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.1.1.𐫒 +T; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.1.1.𐫒 +N; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.1.1.𐫒 +T; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.1.1.𐫒 +N; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.1.1.𐫒 +B; xn--5kj3511ccyw3h.1.1.xn--7w9c; [B1 V6]; [B1 V6] +B; xn--5kj3511ccyw3h.xn--1-rgn.1.xn--7w9c; [B1 C1 V6]; [B1 C1 V6] # 쥥ⴎ.1.1.𐫒 +B; xn--mnd7865gcy28g.1.1.xn--7w9c; [B1 V6]; [B1 V6] +B; xn--mnd7865gcy28g.xn--1-rgn.1.xn--7w9c; [B1 C1 V6]; [B1 C1 V6] # 쥥Ⴎ.1.1.𐫒 +T; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 +N; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 +T; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 +N; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 +B; xn--5kj3511ccyw3h.xn--tsha6797o; [B1 V6]; [B1 V6] +B; xn--5kj3511ccyw3h.xn--0ug88oa0396u; [B1 C1 V6]; [B1 C1 V6] # 쥥ⴎ.⒈⒈𐫒 +B; xn--mnd7865gcy28g.xn--tsha6797o; [B1 V6]; [B1 V6] +B; xn--mnd7865gcy28g.xn--0ug88oa0396u; [B1 C1 V6]; [B1 C1 V6] # 쥥Ⴎ.⒈⒈𐫒 +B; \u0827𝟶\u06A0-。𑄳; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 +B; \u08270\u06A0-。𑄳; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 +B; xn--0--p3d67m.xn--v80d; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 +T; ς.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # ς.فمي🞛⒈ +N; ς.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # ς.فمي🞛⒈ +T; ς.\u0641\u0645\u064A🞛1.; ; xn--4xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. +N; ς.\u0641\u0645\u064A🞛1.; ; xn--3xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. +B; Σ.\u0641\u0645\u064A🞛1.; σ.\u0641\u0645\u064A🞛1.; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. 
+B; σ.\u0641\u0645\u064A🞛1.; ; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. +B; xn--4xa.xn--1-gocmu97674d.; σ.\u0641\u0645\u064A🞛1.; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. +B; xn--3xa.xn--1-gocmu97674d.; ς.\u0641\u0645\u064A🞛1.; xn--3xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. +B; Σ.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # σ.فمي🞛⒈ +B; σ.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # σ.فمي🞛⒈ +B; xn--4xa.xn--dhbip2802atb20c; [V6]; [V6] # σ.فمي🞛⒈ +B; xn--3xa.xn--dhbip2802atb20c; [V6]; [V6] # ς.فمي🞛⒈ +B; 🗩-。𐹻󐞆񥉮; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; 🗩-。𐹻󐞆񥉮; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn----6t3s.xn--zo0d4811u6ru6a; [B1 V3 V6]; [B1 V3 V6] +T; 𐡜-🔪。𝟻\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 P1 V6] # 𐡜-🔪.5 +N; 𐡜-🔪。𝟻\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𐡜-🔪.5 +T; 𐡜-🔪。5\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 P1 V6] # 𐡜-🔪.5 +N; 𐡜-🔪。5\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𐡜-🔪.5 +B; xn----5j4iv089c.xn--5-bn7i; [B1 B3 V6]; [B1 B3 V6] +B; xn----5j4iv089c.xn--5-sgn7149h; [B1 B3 C1 V6]; [B1 B3 C1 V6] # 𐡜-🔪.5 +T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ +N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ +N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ +N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ +N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +B; xn--ss-i05i7041a.xn--0-vgc50n; [B1]; [B1] # 𐹣늿ss.ߏ0ּ +B; xn--ss-l1tu910fo0xd.xn--0-vgc50n; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +B; xn--zca770n5s4hev6c.xn--0-vgc50n; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +B; 9󠇥.󪴴ᢓ; [P1 V6]; [P1 V6] +B; 9󠇥.󪴴ᢓ; [P1 V6]; [P1 V6] +B; 9.xn--dbf91222q; [V6]; [V6] +T; \u200C\uFFA0.𐫭🠗ß⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ß玉 +N; \u200C\uFFA0.𐫭🠗ß⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ß玉 +T; \u200C\u1160.𐫭🠗ß玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ß玉 +N; \u200C\u1160.𐫭🠗ß玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ß玉 +T; \u200C\u1160.𐫭🠗SS玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\u1160.𐫭🠗SS玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +T; \u200C\u1160.𐫭🠗ss玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\u1160.𐫭🠗ss玉; 
[B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +T; \u200C\u1160.𐫭🠗Ss玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\u1160.𐫭🠗Ss玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +B; xn--psd.xn--ss-je6eq954cp25j; [B2 B3 V6]; [B2 B3 V6] # .𐫭🠗ss玉 +B; xn--psd526e.xn--ss-je6eq954cp25j; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ss玉 +B; xn--psd526e.xn--zca2289c550e0iwi; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ß玉 +T; \u200C\uFFA0.𐫭🠗SS⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\uFFA0.𐫭🠗SS⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +T; \u200C\uFFA0.𐫭🠗ss⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\uFFA0.𐫭🠗ss⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +T; \u200C\uFFA0.𐫭🠗Ss⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\uFFA0.𐫭🠗Ss⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +B; xn--cl7c.xn--ss-je6eq954cp25j; [B2 B3 V6]; [B2 B3 V6] # .𐫭🠗ss玉 +B; xn--0ug7719f.xn--ss-je6eq954cp25j; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ss玉 +B; xn--0ug7719f.xn--zca2289c550e0iwi; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ß玉 +T; ︒Ⴖ\u0366.\u200C; [C1 P1 V6]; [P1 V6] # ︒Ⴖͦ. +N; ︒Ⴖ\u0366.\u200C; [C1 P1 V6]; [C1 P1 V6] # ︒Ⴖͦ. +T; 。Ⴖ\u0366.\u200C; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # .Ⴖͦ. +N; 。Ⴖ\u0366.\u200C; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # .Ⴖͦ. +T; 。ⴖ\u0366.\u200C; [C1 A4_2]; [A4_2] # .ⴖͦ. +N; 。ⴖ\u0366.\u200C; [C1 A4_2]; [C1 A4_2] # .ⴖͦ. +B; .xn--hva754s.; [A4_2]; [A4_2] # .ⴖͦ. +B; .xn--hva754s.xn--0ug; [C1 A4_2]; [C1 A4_2] # .ⴖͦ. +B; .xn--hva929d.; [V6 A4_2]; [V6 A4_2] # .Ⴖͦ. +B; .xn--hva929d.xn--0ug; [C1 V6 A4_2]; [C1 V6 A4_2] # .Ⴖͦ. +T; ︒ⴖ\u0366.\u200C; [C1 P1 V6]; [P1 V6] # ︒ⴖͦ. +N; ︒ⴖ\u0366.\u200C; [C1 P1 V6]; [C1 P1 V6] # ︒ⴖͦ. +B; xn--hva754sy94k.; [V6]; [V6] # ︒ⴖͦ. +B; xn--hva754sy94k.xn--0ug; [C1 V6]; [C1 V6] # ︒ⴖͦ. +B; xn--hva929dl29p.; [V6]; [V6] # ︒Ⴖͦ. +B; xn--hva929dl29p.xn--0ug; [C1 V6]; [C1 V6] # ︒Ⴖͦ. +B; xn--hva754s.; ⴖ\u0366.; xn--hva754s. # ⴖͦ. +B; ⴖ\u0366.; ; xn--hva754s. # ⴖͦ. +B; Ⴖ\u0366.; [P1 V6]; [P1 V6] # Ⴖͦ. +B; xn--hva929d.; [V6]; [V6] # Ⴖͦ. +T; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 +N; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࢻ.Ⴃ𞀒 +T; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 +N; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࢻ.Ⴃ𞀒 +T; \u08BB.\u200Cⴃ𞀒; [B1 C1]; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 +N; \u08BB.\u200Cⴃ𞀒; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 +B; xn--hzb.xn--ukj4430l; \u08BB.ⴃ𞀒; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 +B; \u08BB.ⴃ𞀒; ; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 +B; \u08BB.Ⴃ𞀒; [P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 +B; xn--hzb.xn--bnd2938u; [V6]; [V6] # ࢻ.Ⴃ𞀒 +B; xn--hzb.xn--0ug822cp045a; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 +B; xn--hzb.xn--bnd300f7225a; [B1 C1 V6]; [B1 C1 V6] # ࢻ.Ⴃ𞀒 +T; \u08BB.\u200Cⴃ𞀒; [B1 C1]; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 +N; \u08BB.\u200Cⴃ𞀒; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 +T; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [P1 V6 A4_2] # .2䫷 +N; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .2䫷 +T; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [P1 V6 A4_2] # .2䫷 +N; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .2䫷 +B; .xn--2-me5ay1273i; [V6 A4_2]; [V6 A4_2] +B; xn--0ugb.xn--2-me5ay1273i; [C1 C2 V6]; [C1 C2 V6] # .2䫷 +B; -𞀤󜠐。򈬖; [P1 V3 V6]; [P1 V3 V6] +B; xn----rq4re4997d.xn--l707b; [V3 V6]; [V3 V6] +T; 󳛂︒\u200C㟀.\u0624⒈; [C1 P1 V6]; [P1 V6] # ︒㟀.ؤ⒈ +N; 󳛂︒\u200C㟀.\u0624⒈; [C1 P1 V6]; [C1 P1 V6] # ︒㟀.ؤ⒈ +T; 󳛂︒\u200C㟀.\u0648\u0654⒈; [C1 P1 V6]; [P1 V6] # ︒㟀.ؤ⒈ +N; 󳛂︒\u200C㟀.\u0648\u0654⒈; [C1 P1 V6]; [C1 P1 V6] # ︒㟀.ؤ⒈ +T; 󳛂。\u200C㟀.\u06241.; [B1 C1 P1 V6]; [P1 V6] # .㟀.ؤ1. +N; 󳛂。\u200C㟀.\u06241.; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .㟀.ؤ1. 
+T; 󳛂。\u200C㟀.\u0648\u06541.; [B1 C1 P1 V6]; [P1 V6] # .㟀.ؤ1. +N; 󳛂。\u200C㟀.\u0648\u06541.; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .㟀.ؤ1. +B; xn--z272f.xn--etl.xn--1-smc.; [V6]; [V6] # .㟀.ؤ1. +B; xn--z272f.xn--0ug754g.xn--1-smc.; [B1 C1 V6]; [B1 C1 V6] # .㟀.ؤ1. +B; xn--etlt457ccrq7h.xn--jgb476m; [V6]; [V6] # ︒㟀.ؤ⒈ +B; xn--0ug754gxl4ldlt0k.xn--jgb476m; [C1 V6]; [C1 V6] # ︒㟀.ؤ⒈ +T; 𑲜\u07CA𝅼。-\u200D; [B1 C2 V3 V5]; [B1 V3 V5] # 𑲜ߊ𝅼.- +N; 𑲜\u07CA𝅼。-\u200D; [B1 C2 V3 V5]; [B1 C2 V3 V5] # 𑲜ߊ𝅼.- +B; xn--lsb5482l7nre.-; [B1 V3 V5]; [B1 V3 V5] # 𑲜ߊ𝅼.- +B; xn--lsb5482l7nre.xn----ugn; [B1 C2 V3 V5]; [B1 C2 V3 V5] # 𑲜ߊ𝅼.- +T; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 +N; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 +T; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 +N; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 +T; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 +N; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 +T; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 +N; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 +T; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 +N; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 +T; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 +N; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 +B; .xn--1chx23bzj4p; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--0ug.xn--1chx23bzj4p; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ⴉ≠𐫶 +B; .xn--hnd481gv73o; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--0ug.xn--hnd481gv73o; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .Ⴉ≠𐫶 +T; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 +N; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 +T; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 +N; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 +T; \u0750。≯ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς +N; \u0750。≯ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς +T; \u0750。>\u0338ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς +N; \u0750。>\u0338ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς +B; \u0750。>\u0338Σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ +B; \u0750。≯Σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ +B; \u0750。≯σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ +B; \u0750。>\u0338σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ +B; xn--3ob.xn--4xa718m; [B1 V6]; [B1 V6] # ݐ.≯σ +B; xn--3ob.xn--3xa918m; [B1 V6]; [B1 V6] # ݐ.≯ς +B; \u07FC𐸆.𓖏︒񊨩Ⴐ; [P1 V6]; [P1 V6] # .︒Ⴐ +B; \u07FC𐸆.𓖏。񊨩Ⴐ; [P1 V6]; [P1 V6] # ..Ⴐ +B; \u07FC𐸆.𓖏。񊨩ⴐ; [P1 V6]; [P1 V6] # ..ⴐ +B; xn--0tb8725k.xn--tu8d.xn--7kj73887a; [V6]; [V6] # ..ⴐ +B; xn--0tb8725k.xn--tu8d.xn--ond97931d; [V6]; [V6] # ..Ⴐ +B; \u07FC𐸆.𓖏︒񊨩ⴐ; [P1 V6]; [P1 V6] # .︒ⴐ +B; xn--0tb8725k.xn--7kj9008dt18a7py9c; [V6]; [V6] # .︒ⴐ +B; xn--0tb8725k.xn--ond3562jt18a7py9c; [V6]; [V6] # .︒Ⴐ +B; Ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] +B; Ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] +B; ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] +B; xn--vfh16m67gx1162b.xn--ro1d; [V5 V6]; [V5 V6] +B; xn--9nd623g4zc5z060c.xn--ro1d; [V5 V6]; [V5 V6] +B; ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] +B; 🄈。󠷳\u0844; [B1 P1 V6]; [B1 P1 V6] # 🄈.ࡄ +B; 7,。󠷳\u0844; [B1 P1 V6]; [B1 P1 V6] # 7,.ࡄ +B; 7,.xn--2vb13094p; [B1 P1 V6]; [B1 P1 V6] # 7,.ࡄ +B; xn--107h.xn--2vb13094p; [B1 V6]; [B1 V6] # 🄈.ࡄ +T; ≮\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß +N; ≮\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß +T; <\u0338\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß +N; <\u0338\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß +B; <\u0338\u0846。섖쮖SS; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; ≮\u0846。섖쮖SS; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss 
+B; ≮\u0846。섖쮖ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; <\u0338\u0846。섖쮖ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; <\u0338\u0846。섖쮖Ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; ≮\u0846。섖쮖Ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; xn--4vb505k.xn--ss-5z4j006a; [B1 V6]; [B1 V6] # ≮ࡆ.섖쮖ss +B; xn--4vb505k.xn--zca7259goug; [B1 V6]; [B1 V6] # ≮ࡆ.섖쮖ß +B; 󠆓⛏-。ꡒ; [V3]; [V3] +B; xn----o9p.xn--rc9a; [V3]; [V3] +T; \u07BB𐹳\u0626𑁆。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ +N; \u07BB𐹳\u0626𑁆。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ +T; \u07BB𐹳\u064A𑁆\u0654。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ +N; \u07BB𐹳\u064A𑁆\u0654。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ +B; xn--lgb32f2753cosb.xn--jkb91hlz1a; [B2 B3 V6]; [B2 B3 V6] # 𐹳ئ𑁆.ࢧڰᢒ +B; xn--lgb32f2753cosb.xn--jkb91hlz1azih; [B2 B3 V6]; [B2 B3 V6] # 𐹳ئ𑁆.ࢧڰᢒ +B; \u0816.𐨕𚚕; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ࠖ.𐨕 +B; xn--rub.xn--tr9c248x; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ࠖ.𐨕 +B; --。𽊆\u0767𐽋𞠬; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # --.ݧ𞠬 +B; --.xn--rpb6226k77pfh58p; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # --.ݧ𞠬 +B; 򛭦𐋥𹸐.≯\u08B0\u08A6󔛣; [B1 P1 V6]; [B1 P1 V6] # 𐋥.≯ࢰࢦ +B; 򛭦𐋥𹸐.>\u0338\u08B0\u08A6󔛣; [B1 P1 V6]; [B1 P1 V6] # 𐋥.≯ࢰࢦ +B; xn--887c2298i5mv6a.xn--vybt688qm8981a; [B1 V6]; [B1 V6] # 𐋥.≯ࢰࢦ +B; 䔛󠇒򤸞𐹧.-䤷; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] +B; 䔛󠇒򤸞𐹧.-䤷; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] +B; xn--2loy662coo60e.xn----0n4a; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +T; 𐹩.\u200D-; [B1 C2 V3]; [B1 V3] # 𐹩.- +N; 𐹩.\u200D-; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- +T; 𐹩.\u200D-; [B1 C2 V3]; [B1 V3] # 𐹩.- +N; 𐹩.\u200D-; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- +B; xn--ho0d.-; [B1 V3]; [B1 V3] +B; xn--ho0d.xn----tgn; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- +B; 񂈦帷。≯萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- +B; 񂈦帷。>\u0338萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- +B; 񂈦帷。≯萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- +B; 񂈦帷。>\u0338萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- +B; xn--qutw175s.xn----mimu6tf67j; [V3 V6]; [V3 V6] # 帷.≯萺᷈- +T; \u200D攌\uABED。ᢖ-Ⴘ; [C2 P1 V6]; [P1 V6] # 攌꯭.ᢖ-Ⴘ +N; \u200D攌\uABED。ᢖ-Ⴘ; [C2 P1 V6]; [C2 P1 V6] # 攌꯭.ᢖ-Ⴘ +T; \u200D攌\uABED。ᢖ-ⴘ; [C2]; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ +N; \u200D攌\uABED。ᢖ-ⴘ; [C2]; [C2] # 攌꯭.ᢖ-ⴘ +B; xn--p9ut19m.xn----mck373i; 攌\uABED.ᢖ-ⴘ; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ +B; 攌\uABED.ᢖ-ⴘ; ; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ +B; 攌\uABED.ᢖ-Ⴘ; [P1 V6]; [P1 V6] # 攌꯭.ᢖ-Ⴘ +B; xn--p9ut19m.xn----k1g451d; [V6]; [V6] # 攌꯭.ᢖ-Ⴘ +B; xn--1ug592ykp6b.xn----mck373i; [C2]; [C2] # 攌꯭.ᢖ-ⴘ +B; xn--1ug592ykp6b.xn----k1g451d; [C2 V6]; [C2 V6] # 攌꯭.ᢖ-Ⴘ +T; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [P1 V6] # ꖨ.⒗3툒۳ +N; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [C1 P1 V6] # ꖨ.⒗3툒۳ +T; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [P1 V6] # ꖨ.⒗3툒۳ +N; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [C1 P1 V6] # ꖨ.⒗3툒۳ +T; \u200Cꖨ.16.3툒۳; [C1]; xn--9r8a.16.xn--3-nyc0117m # ꖨ.16.3툒۳ +N; \u200Cꖨ.16.3툒۳; [C1]; [C1] # ꖨ.16.3툒۳ +T; \u200Cꖨ.16.3툒۳; [C1]; xn--9r8a.16.xn--3-nyc0117m # ꖨ.16.3툒۳ +N; \u200Cꖨ.16.3툒۳; [C1]; [C1] # ꖨ.16.3툒۳ +B; xn--9r8a.16.xn--3-nyc0117m; ꖨ.16.3툒۳; xn--9r8a.16.xn--3-nyc0117m +B; ꖨ.16.3툒۳; ; xn--9r8a.16.xn--3-nyc0117m +B; ꖨ.16.3툒۳; ꖨ.16.3툒۳; xn--9r8a.16.xn--3-nyc0117m +B; xn--0ug2473c.16.xn--3-nyc0117m; [C1]; [C1] # ꖨ.16.3툒۳ +B; xn--9r8a.xn--3-nyc678tu07m; [V6]; [V6] +B; xn--0ug2473c.xn--3-nyc678tu07m; [C1 V6]; [C1 V6] # ꖨ.⒗3툒۳ +B; ⒈걾6.𐱁\u06D0; [B1 P1 V6]; [B1 P1 V6] # ⒈걾6.𐱁ې +B; ⒈걾6.𐱁\u06D0; [B1 P1 V6]; [B1 P1 V6] # ⒈걾6.𐱁ې +B; 1.걾6.𐱁\u06D0; [B1]; [B1] # 1.걾6.𐱁ې +B; 1.걾6.𐱁\u06D0; [B1]; [B1] # 1.걾6.𐱁ې +B; 1.xn--6-945e.xn--glb1794k; [B1]; [B1] # 1.걾6.𐱁ې 
+B; xn--6-dcps419c.xn--glb1794k; [B1 V6]; [B1 V6] # ⒈걾6.𐱁ې +B; 𐲞𝟶≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐲞𝟶<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐲞0≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐲞0<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐳞0<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐳞0≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; xn--0-ngoa5711v.xn--4gb31034p; [B1 B3 V6]; [B1 B3 V6] # 𐳞0≮≮.ع +B; 𐳞𝟶<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐳞𝟶≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; \u0AE3.𐹺\u115F; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ૣ.𐹺 +B; xn--8fc.xn--osd3070k; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ૣ.𐹺 +T; 𝟏𝨙⸖.\u200D; [C2]; xn--1-5bt6845n. # 1𝨙⸖. +N; 𝟏𝨙⸖.\u200D; [C2]; [C2] # 1𝨙⸖. +T; 1𝨙⸖.\u200D; [C2]; xn--1-5bt6845n. # 1𝨙⸖. +N; 1𝨙⸖.\u200D; [C2]; [C2] # 1𝨙⸖. +B; xn--1-5bt6845n.; 1𝨙⸖.; xn--1-5bt6845n.; NV8 +B; 1𝨙⸖.; ; xn--1-5bt6845n.; NV8 +B; xn--1-5bt6845n.xn--1ug; [C2]; [C2] # 1𝨙⸖. +T; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +B; xn--wnb859grzfzw60c.xn----kcd; [B1 V3 V6]; [B1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +B; xn--wnb859grzfzw60c.xn----kcd017p; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +B; 𐹰\u0368-ꡧ。\u0675; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ +B; 𐹰\u0368-ꡧ。\u0627\u0674; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ +B; xn----shb2387jgkqd.xn--mgb8m; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ +B; F󠅟。򏗅♚; [P1 V6]; [P1 V6] +B; F󠅟。򏗅♚; [P1 V6]; [P1 V6] +B; f󠅟。򏗅♚; [P1 V6]; [P1 V6] +B; f.xn--45hz6953f; [V6]; [V6] +B; f󠅟。򏗅♚; [P1 V6]; [P1 V6] +B; \u0B4D𑄴\u1DE9。𝟮Ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 +B; \u0B4D𑄴\u1DE9。2Ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 +B; \u0B4D𑄴\u1DE9。2ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 +B; xn--9ic246gs21p.xn--2-nws2918ndrjr; [V5 V6]; [V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 +B; xn--9ic246gs21p.xn--2-k1g43076adrwq; [V5 V6]; [V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 +B; \u0B4D𑄴\u1DE9。𝟮ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 +T; 򓠭\u200C\u200C⒈。勉𑁅; [C1 P1 V6]; [P1 V6] # ⒈.勉𑁅 +N; 򓠭\u200C\u200C⒈。勉𑁅; [C1 P1 V6]; [C1 P1 V6] # ⒈.勉𑁅 +T; 򓠭\u200C\u200C1.。勉𑁅; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1..勉𑁅 +N; 򓠭\u200C\u200C1.。勉𑁅; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1..勉𑁅 +B; xn--1-yi00h..xn--4grs325b; [V6 A4_2]; [V6 A4_2] +B; xn--1-rgna61159u..xn--4grs325b; [C1 V6 A4_2]; [C1 V6 A4_2] # 1..勉𑁅 +B; xn--tsh11906f.xn--4grs325b; [V6]; [V6] +B; 
xn--0uga855aez302a.xn--4grs325b; [C1 V6]; [C1 V6] # ⒈.勉𑁅 +B; ᡃ.玿񫈜󕞐; [P1 V6]; [P1 V6] +B; xn--27e.xn--7cy81125a0yq4a; [V6]; [V6] +T; \u200C\u200C。⒈≯𝟵; [C1 P1 V6]; [P1 V6 A4_2] # .⒈≯9 +N; \u200C\u200C。⒈≯𝟵; [C1 P1 V6]; [C1 P1 V6] # .⒈≯9 +T; \u200C\u200C。⒈>\u0338𝟵; [C1 P1 V6]; [P1 V6 A4_2] # .⒈≯9 +N; \u200C\u200C。⒈>\u0338𝟵; [C1 P1 V6]; [C1 P1 V6] # .⒈≯9 +T; \u200C\u200C。1.≯9; [C1 P1 V6]; [P1 V6 A4_2] # .1.≯9 +N; \u200C\u200C。1.≯9; [C1 P1 V6]; [C1 P1 V6] # .1.≯9 +T; \u200C\u200C。1.>\u03389; [C1 P1 V6]; [P1 V6 A4_2] # .1.≯9 +N; \u200C\u200C。1.>\u03389; [C1 P1 V6]; [C1 P1 V6] # .1.≯9 +B; .1.xn--9-ogo; [V6 A4_2]; [V6 A4_2] +B; xn--0uga.1.xn--9-ogo; [C1 V6]; [C1 V6] # .1.≯9 +B; .xn--9-ogo37g; [V6 A4_2]; [V6 A4_2] +B; xn--0uga.xn--9-ogo37g; [C1 V6]; [C1 V6] # .⒈≯9 +B; \u115F\u1DE0򐀁.𺻆≯𐮁; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᷠ.≯𐮁 +B; \u115F\u1DE0򐀁.𺻆>\u0338𐮁; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᷠ.≯𐮁 +B; xn--osd615d5659o.xn--hdh5192gkm6r; [B5 B6 V6]; [B5 B6 V6] # ᷠ.≯𐮁 +T; 󠄫𝩤\u200D\u063E.𝩩-\u081E󑼩; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𝩤ؾ.𝩩-ࠞ +N; 󠄫𝩤\u200D\u063E.𝩩-\u081E󑼩; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𝩤ؾ.𝩩-ࠞ +B; xn--9gb5080v.xn----qgd52296avol4f; [B1 V5 V6]; [B1 V5 V6] # 𝩤ؾ.𝩩-ࠞ +B; xn--9gb723kg862a.xn----qgd52296avol4f; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𝩤ؾ.𝩩-ࠞ +B; \u20DA.𑘿-; [V3 V5]; [V3 V5] # ⃚.𑘿- +B; \u20DA.𑘿-; [V3 V5]; [V3 V5] # ⃚.𑘿- +B; xn--w0g.xn----bd0j; [V3 V5]; [V3 V5] # ⃚.𑘿- +T; 䮸ß.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ß.紙ࢨ +N; 䮸ß.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ß.紙ࢨ +B; 䮸SS.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ +B; 䮸ss.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ +B; 䮸Ss.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ +B; xn--ss-sf1c.xn--xyb1370div70kpzba; [B1 V6]; [B1 V6] # 䮸ss.紙ࢨ +B; xn--zca5349a.xn--xyb1370div70kpzba; [B1 V6]; [B1 V6] # 䮸ß.紙ࢨ +B; -Ⴞ.-𝩨⅔𐦕; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -Ⴞ.-𝩨2⁄3𐦕; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -ⴞ.-𝩨2⁄3𐦕; [B1 V3]; [B1 V3] +B; xn----zws.xn---23-pt0a0433lk3jj; [B1 V3]; [B1 V3] +B; xn----w1g.xn---23-pt0a0433lk3jj; [B1 V3 V6]; [B1 V3 V6] +B; -ⴞ.-𝩨⅔𐦕; [B1 V3]; [B1 V3] +B; 󧈯𐹯\u0AC2。򖢨𐮁񇼖ᡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 𐹯ૂ.𐮁ᡂ +B; 󧈯𐹯\u0AC2。򖢨𐮁񇼖ᡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 𐹯ૂ.𐮁ᡂ +B; xn--bfc7604kv8m3g.xn--17e5565jl7zw4h16a; [B5 B6 V6]; [B5 B6 V6] # 𐹯ૂ.𐮁ᡂ +T; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [P1 V5 V6] # ႂ-꣪.ꡊ +N; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ႂ-꣪.ꡊ +T; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [P1 V5 V6] # ႂ-꣪.ꡊ +N; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ႂ-꣪.ꡊ +B; xn----gyg3618i.xn--jc9ao4185a; [V5 V6]; [V5 V6] # ႂ-꣪.ꡊ +B; xn----gyg250jio7k.xn--1ug8774cri56d; [C2 V5 V6]; [C2 V5 V6] # ႂ-꣪.ꡊ +B; ۱。≠\u0668; [B1 P1 V6]; [B1 P1 V6] # ۱.≠٨ +B; ۱。=\u0338\u0668; [B1 P1 V6]; [B1 P1 V6] # ۱.≠٨ +B; xn--emb.xn--hib334l; [B1 V6]; [B1 V6] # ۱.≠٨ +B; 𑈵廊.𐠍; [V5]; [V5] +B; xn--xytw701b.xn--yc9c; [V5]; [V5] +T; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ͖-.-Ⴐ١ +N; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # ͖-.-Ⴐ١ +T; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ͖-.-Ⴐ١ +N; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # ͖-.-Ⴐ١ +T; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 V3 V5] # ͖-.-ⴐ١ +N; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ +B; xn----rgb.xn----bqc2280a; [B1 V3 V5]; [B1 V3 V5] # ͖-.-ⴐ١ +B; xn----rgb661t.xn----bqc2280a; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ +B; xn----rgb.xn----bqc030f; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ͖-.-Ⴐ١ +B; xn----rgb661t.xn----bqc030f; [B1 C2 V3 V6]; [B1 C2 V3 V6] # ͖-.-Ⴐ١ +T; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 V3 V5] # 
͖-.-ⴐ١ +N; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ +B; \u063A\u0661挏󾯐.-; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # غ١挏.- +B; xn--5gb2f4205aqi47p.-; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # غ١挏.- +B; \u06EF。𐹧𞤽; [B1]; [B1] # ۯ.𐹧𞤽 +B; \u06EF。𐹧𞤽; [B1]; [B1] # ۯ.𐹧𞤽 +B; \u06EF。𐹧𞤛; [B1]; [B1] # ۯ.𐹧𞤽 +B; xn--cmb.xn--fo0dy848a; [B1]; [B1] # ۯ.𐹧𞤽 +B; \u06EF。𐹧𞤛; [B1]; [B1] # ۯ.𐹧𞤽 +B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; xn--mlj0486jgl2j.xn--hbf6853f; [V6]; [V6] +B; xn--2nd8876sgl2j.xn--hbf6853f; [V6]; [V6] +B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +T; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ +N; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ +T; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ +N; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ +T; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ +N; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ +T; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ +N; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ +B; xn--qkb4516kbi06fg2id.xn--zfg31q; [B1 V6]; [B1 V6] # ڷ𐹷.≯᷾ +B; xn--qkb4516kbi06fg2id.xn--zfg59fm0c; [B1 C1 V6]; [B1 C1 V6] # ڷ𐹷.≯᷾ +T; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 P1 V3 V6] # ᛎ.𐹾𐹪- +N; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # ᛎ.𐹾𐹪- +T; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 P1 V3 V6] # ᛎ.𐹾𐹪- +N; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # ᛎ.𐹾𐹪- +B; xn--fxe63563p.xn----q26i2bvu; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn--fxe848bq3411a.xn----q26i2bvu; [B1 B6 C2 V3 V6]; [B1 B6 C2 V3 V6] # ᛎ.𐹾𐹪- +B; 𐹶.𐫂; [B1]; [B1] +B; xn--uo0d.xn--rw9c; [B1]; [B1] +T; ß\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ß်.⒈ +N; ß\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ß်.⒈ +T; ß\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ß်.1. +N; ß\u200D\u103A。1.; [C2]; [C2] # ß်.1. +T; SS\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. +N; SS\u200D\u103A。1.; [C2]; [C2] # ss်.1. +T; ss\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. +N; ss\u200D\u103A。1.; [C2]; [C2] # ss်.1. +T; Ss\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. +N; Ss\u200D\u103A。1.; [C2]; [C2] # ss်.1. +B; xn--ss-f4j.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. +B; ss\u103A.1.; ; xn--ss-f4j.1. # ss်.1. +B; SS\u103A.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. +B; Ss\u103A.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. +B; xn--ss-f4j585j.1.; [C2]; [C2] # ss်.1. +B; xn--zca679eh2l.1.; [C2]; [C2] # ß်.1. +T; SS\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ +N; SS\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ +T; ss\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ +N; ss\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ +T; Ss\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ +N; Ss\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ +B; xn--ss-f4j.xn--tsh; [V6]; [V6] # ss်.⒈ +B; xn--ss-f4j585j.xn--tsh; [C2 V6]; [C2 V6] # ss်.⒈ +B; xn--zca679eh2l.xn--tsh; [C2 V6]; [C2 V6] # ß်.⒈ +T; \u0B4D\u200C𙶵𞻘。\u200D; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ୍. +N; \u0B4D\u200C𙶵𞻘。\u200D; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ୍. +B; xn--9ic6417rn4xb.; [B1 V5 V6]; [B1 V5 V6] # ୍. +B; xn--9ic637hz82z32jc.xn--1ug; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ୍. 
+B; 𐮅。\u06BC🁕; [B3]; [B3] # 𐮅.ڼ🁕 +B; 𐮅。\u06BC🁕; [B3]; [B3] # 𐮅.ڼ🁕 +B; xn--c29c.xn--vkb8871w; [B3]; [B3] # 𐮅.ڼ🁕 +T; \u0620\u17D2。𐫔󠀧\u200C𑈵; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ؠ្.𐫔𑈵 +N; \u0620\u17D2。𐫔󠀧\u200C𑈵; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ؠ្.𐫔𑈵 +B; xn--fgb471g.xn--9w9c29jw3931a; [B2 B3 V6]; [B2 B3 V6] # ؠ្.𐫔𑈵 +B; xn--fgb471g.xn--0ug9853g7verp838a; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ؠ្.𐫔𑈵 +B; 񋉕.𞣕𞤊; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; 񋉕.𞣕𞤬; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; xn--tf5w.xn--2b6hof; [B1 V5 V6]; [B1 V5 V6] +T; \u06CC𐨿.ß\u0F84𑍬; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ß྄𑍬 +N; \u06CC𐨿.ß\u0F84𑍬; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 +T; \u06CC𐨿.ß\u0F84𑍬; ; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ß྄𑍬 +N; \u06CC𐨿.ß\u0F84𑍬; ; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 +B; \u06CC𐨿.SS\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; \u06CC𐨿.ss\u0F84𑍬; ; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; \u06CC𐨿.Ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; xn--clb2593k.xn--ss-toj6092t; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; xn--clb2593k.xn--zca216edt0r; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 +B; \u06CC𐨿.SS\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; \u06CC𐨿.ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; \u06CC𐨿.Ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +T; 𝟠≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. +N; 𝟠≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. +T; 𝟠<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. +N; 𝟠<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. +T; 8≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. +N; 8≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. +T; 8<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. +N; 8<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. +B; xn--8-ngo.xn--z3e; [V5 V6]; [V5 V6] # 8≮. +B; xn--8-sgn10i.xn--z3e; [C1 V5 V6]; [C1 V5 V6] # 8≮. 
+B; ᢕ≯︒񄂯.Ⴀ; [P1 V6]; [P1 V6] +B; ᢕ>\u0338︒񄂯.Ⴀ; [P1 V6]; [P1 V6] +B; ᢕ≯。񄂯.Ⴀ; [P1 V6]; [P1 V6] +B; ᢕ>\u0338。񄂯.Ⴀ; [P1 V6]; [P1 V6] +B; ᢕ>\u0338。񄂯.ⴀ; [P1 V6]; [P1 V6] +B; ᢕ≯。񄂯.ⴀ; [P1 V6]; [P1 V6] +B; xn--fbf851c.xn--ko1u.xn--rkj; [V6]; [V6] +B; xn--fbf851c.xn--ko1u.xn--7md; [V6]; [V6] +B; ᢕ>\u0338︒񄂯.ⴀ; [P1 V6]; [P1 V6] +B; ᢕ≯︒񄂯.ⴀ; [P1 V6]; [P1 V6] +B; xn--fbf851cq98poxw1a.xn--rkj; [V6]; [V6] +B; xn--fbf851cq98poxw1a.xn--7md; [V6]; [V6] +B; \u0F9F.-\u082A; [V3 V5]; [V3 V5] # ྟ.-ࠪ +B; \u0F9F.-\u082A; [V3 V5]; [V3 V5] # ྟ.-ࠪ +B; xn--vfd.xn----fhd; [V3 V5]; [V3 V5] # ྟ.-ࠪ +B; ᵬ󠆠.핒⒒⒈􈄦; [P1 V6]; [P1 V6] +B; ᵬ󠆠.핒⒒⒈􈄦; [P1 V6]; [P1 V6] +B; ᵬ󠆠.핒11.1.􈄦; [P1 V6]; [P1 V6] +B; ᵬ󠆠.핒11.1.􈄦; [P1 V6]; [P1 V6] +B; xn--tbg.xn--11-5o7k.1.xn--k469f; [V6]; [V6] +B; xn--tbg.xn--tsht7586kyts9l; [V6]; [V6] +T; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ +N; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ +T; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ +N; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ +B; Σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ +B; σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ +B; xn--4xa6371khhl.xn--hib; [B1]; [B1] # σ𑓂𐋢.٨ +B; xn--3xa8371khhl.xn--hib; [B1]; [B1] # ς𑓂𐋢.٨ +B; Σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ +B; σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ +T; \uA953\u200C𐋻\u200D.\u2DF8𞿄𐹲; [B1 B6 C2 P1 V5 V6]; [B1 P1 V5 V6] # ꥓𐋻.ⷸ𐹲 +N; \uA953\u200C𐋻\u200D.\u2DF8𞿄𐹲; [B1 B6 C2 P1 V5 V6]; [B1 B6 C2 P1 V5 V6] # ꥓𐋻.ⷸ𐹲 +B; xn--3j9a531o.xn--urju692efj0f; [B1 V5 V6]; [B1 V5 V6] # ꥓𐋻.ⷸ𐹲 +B; xn--0ugc8356he76c.xn--urju692efj0f; [B1 B6 C2 V5 V6]; [B1 B6 C2 V5 V6] # ꥓𐋻.ⷸ𐹲 +B; ⊼。񪧖\u0695; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ⊼.ڕ +B; xn--ofh.xn--rjb13118f; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ⊼.ڕ +B; 𐯬񖋔。󜳥; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--949co370q.xn--7g25e; [B2 B3 V6]; [B2 B3 V6] +T; \u0601𑍧\u07DD。ς򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.ς🀞 +N; \u0601𑍧\u07DD。ς򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.ς🀞 +B; \u0601𑍧\u07DD。Σ򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.σ🀞 +B; \u0601𑍧\u07DD。σ򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.σ🀞 +B; xn--jfb66gt010c.xn--4xa623h9p95ars26d; [B1 B6 V6]; [B1 B6 V6] # 𑍧ߝ.σ🀞 +B; xn--jfb66gt010c.xn--3xa823h9p95ars26d; [B1 B6 V6]; [B1 B6 V6] # 𑍧ߝ.ς🀞 +B; -𐳲\u0646󠺐。\uABED𝟥; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 +B; -𐳲\u0646󠺐。\uABED3; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 +B; -𐲲\u0646󠺐。\uABED3; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 +B; xn----roc5482rek10i.xn--3-zw5e; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -𐳲ن.꯭3 +B; -𐲲\u0646󠺐。\uABED𝟥; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 +T; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 +N; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 +T; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 +N; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 +T; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 +N; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 +T; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 +N; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 +B; xn--6v56e.xn--gdhz712gzlr6b; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; xn--0ug22251l.xn--gdhz712gzlr6b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .≮𐦜 +B; ⒈✌򟬟.𝟡񠱣; [P1 V6]; [P1 V6] +B; 1.✌򟬟.9񠱣; [P1 V6]; [P1 V6] +B; 1.xn--7bi44996f.xn--9-o706d; [V6]; [V6] +B; xn--tsh24g49550b.xn--9-o706d; [V6]; [V6] +B; 𑆾𞤬𐮆.\u0666\u1DD4; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ +B; 𑆾𞤊𐮆.\u0666\u1DD4; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ +B; xn--d29c79hf98r.xn--fib011j; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ +T; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ +N; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ +T; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ +N; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ 
+B; Σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ +B; σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ +B; xn--4xa.xn--0f9ars; [V5]; [V5] # σ.꧀꣄ +B; xn--3xa.xn--0f9ars; [V5]; [V5] # ς.꧀꣄ +B; Σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ +B; σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ +T; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +B; xn--hdhz343g3wj.xn--qwb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𑰶≯𐳐.࡛ +B; xn--0ug06g7697ap4ma.xn--qwb; [B1 B3 B6 C1 V5 V6]; [B1 B3 B6 C1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +B; 羚。≯; [P1 V6]; [P1 V6] +B; 羚。>\u0338; [P1 V6]; [P1 V6] +B; 羚。≯; [P1 V6]; [P1 V6] +B; 羚。>\u0338; [P1 V6]; [P1 V6] +B; xn--xt0a.xn--hdh; [V6]; [V6] +B; 𑓂\u1759.\u08A8; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑓂.ࢨ +B; 𑓂\u1759.\u08A8; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑓂.ࢨ +B; xn--e1e9580k.xn--xyb; [B1 V5 V6]; [B1 V5 V6] # 𑓂.ࢨ +T; 󨣿󠇀\u200D。\u0663ҠჀ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡჀ3 +N; 󨣿󠇀\u200D。\u0663ҠჀ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡჀ3 +T; 󨣿󠇀\u200D。\u0663ҠჀ3; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡჀ3 +N; 󨣿󠇀\u200D。\u0663ҠჀ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡჀ3 +T; 󨣿󠇀\u200D。\u0663ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 +N; 󨣿󠇀\u200D。\u0663ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 +T; 󨣿󠇀\u200D。\u0663Ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 +N; 󨣿󠇀\u200D。\u0663Ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 +B; xn--1r19e.xn--3-ozb36ko13f; [B1 V6]; [B1 V6] # .٣ҡⴠ3 +B; xn--1ug89936l.xn--3-ozb36ko13f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # .٣ҡⴠ3 +B; xn--1r19e.xn--3-ozb36kixu; [B1 V6]; [B1 V6] # .٣ҡჀ3 +B; xn--1ug89936l.xn--3-ozb36kixu; [B1 B6 C2 V6]; [B1 B6 C2 V6] # .٣ҡჀ3 +T; 󨣿󠇀\u200D。\u0663ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 +N; 󨣿󠇀\u200D。\u0663ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 +T; 󨣿󠇀\u200D。\u0663Ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 +N; 󨣿󠇀\u200D。\u0663Ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 +B; ᡷ。𐹢\u08E0; [B1]; [B1] # ᡷ.𐹢࣠ +B; xn--k9e.xn--j0b5005k; [B1]; [B1] # ᡷ.𐹢࣠ +T; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß +N; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß +T; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß +N; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß +B; 򕮇\u1BF3。\u0666񗜼\u17D2SS; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; 򕮇\u1BF3。\u0666񗜼\u17D2ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; 򕮇\u1BF3。\u0666񗜼\u17D2Ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; xn--1zf58212h.xn--ss-pyd459o3258m; [B1 V6]; [B1 V6] # ᯳.٦្ss +B; 
xn--1zf58212h.xn--zca34zk4qx711k; [B1 V6]; [B1 V6] # ᯳.٦្ß +B; 򕮇\u1BF3。\u0666񗜼\u17D2SS; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; 򕮇\u1BF3。\u0666񗜼\u17D2ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; 򕮇\u1BF3。\u0666񗜼\u17D2Ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; \u0664򤽎𑲛.󠔢︒≠; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛.︒≠ +B; \u0664򤽎𑲛.󠔢︒=\u0338; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛.︒≠ +B; \u0664򤽎𑲛.󠔢。≠; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛..≠ +B; \u0664򤽎𑲛.󠔢。=\u0338; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛..≠ +B; xn--dib0653l2i02d.xn--k736e.xn--1ch; [B1 V6]; [B1 V6] # ٤𑲛..≠ +B; xn--dib0653l2i02d.xn--1ch7467f14u4g; [B1 V6]; [B1 V6] # ٤𑲛.︒≠ +B; ➆񷧕ỗ⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 +B; ➆񷧕o\u0302\u0303⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 +B; ➆񷧕ỗ1..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 +B; ➆񷧕o\u0302\u03031..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 +B; ➆񷧕O\u0302\u03031..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 +B; ➆񷧕Ỗ1..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 +B; xn--1-3xm292b6044r..xn--9-6jd87310jtcqs; [V6 A4_2]; [V6 A4_2] # ➆ỗ1..࡛9 +B; ➆񷧕O\u0302\u0303⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 +B; ➆񷧕Ỗ⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 +B; xn--6lg26tvvc6v99z.xn--9-6jd87310jtcqs; [V6]; [V6] # ➆ỗ⒈.࡛9 +T; \u200D。𞤘; [B1 C2]; [A4_2] # .𞤺 +N; \u200D。𞤘; [B1 C2]; [B1 C2] # .𞤺 +T; \u200D。𞤘; [B1 C2]; [A4_2] # .𞤺 +N; \u200D。𞤘; [B1 C2]; [B1 C2] # .𞤺 +T; \u200D。𞤺; [B1 C2]; [A4_2] # .𞤺 +N; \u200D。𞤺; [B1 C2]; [B1 C2] # .𞤺 +B; .xn--ye6h; [A4_2]; [A4_2] +B; xn--1ug.xn--ye6h; [B1 C2]; [B1 C2] # .𞤺 +T; \u200D。𞤺; [B1 C2]; [A4_2] # .𞤺 +N; \u200D。𞤺; [B1 C2]; [B1 C2] # .𞤺 +B; xn--ye6h; 𞤺; xn--ye6h +B; 𞤺; ; xn--ye6h +B; 𞤘; 𞤺; xn--ye6h +B; \u0829\u0724.ᢣ; [B1 V5]; [B1 V5] # ࠩܤ.ᢣ +B; xn--unb53c.xn--tbf; [B1 V5]; [B1 V5] # ࠩܤ.ᢣ +T; \u073C\u200C-。𓐾ß; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ß +N; \u073C\u200C-。𓐾ß; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ß +T; \u073C\u200C-。𓐾SS; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss +N; \u073C\u200C-。𓐾SS; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss +T; \u073C\u200C-。𓐾ss; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss +N; \u073C\u200C-。𓐾ss; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss +T; \u073C\u200C-。𓐾Ss; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss +N; \u073C\u200C-。𓐾Ss; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss +B; xn----s2c.xn--ss-066q; [V3 V5 V6]; [V3 V5 V6] # ܼ-.ss +B; xn----s2c071q.xn--ss-066q; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ܼ-.ss +B; xn----s2c071q.xn--zca7848m; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ܼ-.ß +T; \u200Cς🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ς🃡⒗.ೆ仧ݖ +N; \u200Cς🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # ς🃡⒗.ೆ仧ݖ +T; \u200Cς🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 A4_2] # ς🃡16..ೆ仧ݖ +N; \u200Cς🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # ς🃡16..ೆ仧ݖ +T; \u200CΣ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ +N; \u200CΣ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ +T; \u200Cσ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ +N; \u200Cσ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ +B; xn--16-ubc66061c..xn--9ob79ycx2e; [B5 B6 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ +B; xn--16-ubc7700avy99b..xn--9ob79ycx2e; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ +B; xn--16-rbc1800avy99b..xn--9ob79ycx2e; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # ς🃡16..ೆ仧ݖ +T; \u200CΣ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ +N; \u200CΣ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ +T; 
\u200Cσ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ +N; \u200Cσ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ +B; xn--4xa229nbu92a.xn--9ob79ycx2e; [B5 B6 V5 V6]; [B5 B6 V5 V6] # σ🃡⒗.ೆ仧ݖ +B; xn--4xa595lz9czy52d.xn--9ob79ycx2e; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # σ🃡⒗.ೆ仧ݖ +B; xn--3xa795lz9czy52d.xn--9ob79ycx2e; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # ς🃡⒗.ೆ仧ݖ +B; -.𞸚; [B1 V3]; [B1 V3] # -.ظ +B; -.\u0638; [B1 V3]; [B1 V3] # -.ظ +B; -.xn--3gb; [B1 V3]; [B1 V3] # -.ظ +B; 򏛓\u0683.\u0F7E\u0634; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڃ.ཾش +B; xn--8ib92728i.xn--zgb968b; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ڃ.ཾش +B; \u0FE6\u0843񽶬.𐮏; [B5 P1 V6]; [B5 P1 V6] # ࡃ.𐮏 +B; xn--1vb320b5m04p.xn--m29c; [B5 V6]; [B5 V6] # ࡃ.𐮏 +T; 2񎨠\u07CBß。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋß.ᠽ +N; 2񎨠\u07CBß。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋß.ᠽ +B; 2񎨠\u07CBSS。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ +B; 2񎨠\u07CBss。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ +B; 2񎨠\u07CBSs。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ +B; xn--2ss-odg83511n.xn--w7e; [B1 V6]; [B1 V6] # 2ߋss.ᠽ +B; xn--2-qfa924cez02l.xn--w7e; [B1 V6]; [B1 V6] # 2ߋß.ᠽ +T; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- +N; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- +N; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- +N; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- +N; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +B; xn--lsb457kkut.xn--ss--qjf; [B2 B3 B5 B6 V3 V6]; [B2 B3 B5 B6 V3 V6] # 㸳ߊ≮.ێss- +B; xn--lsb457kkut.xn--ss--qjf2343a; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # 㸳ߊ≮.ێss- +B; xn--lsb457kkut.xn----pfa076bys4a; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 
V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +B; -򷝬\u135E𑜧.\u1DEB-︒; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -፞𑜧.ᷫ-︒ +B; -򷝬\u135E𑜧.\u1DEB-。; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -፞𑜧.ᷫ-. +B; xn----b5h1837n2ok9f.xn----mkm.; [V3 V5 V6]; [V3 V5 V6] # -፞𑜧.ᷫ-. +B; xn----b5h1837n2ok9f.xn----mkmw278h; [V3 V5 V6]; [V3 V5 V6] # -፞𑜧.ᷫ-︒ +B; ︒.򚠡\u1A59; [P1 V6]; [P1 V6] # ︒.ᩙ +B; 。.򚠡\u1A59; [P1 V6 A4_2]; [P1 V6 A4_2] # ..ᩙ +B; ..xn--cof61594i; [V6 A4_2]; [V6 A4_2] # ..ᩙ +B; xn--y86c.xn--cof61594i; [V6]; [V6] # ︒.ᩙ +T; \u0323\u2DE1。\u200C⓾\u200C\u06B9; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ̣ⷡ.⓾ڹ +N; \u0323\u2DE1。\u200C⓾\u200C\u06B9; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ̣ⷡ.⓾ڹ +B; xn--kta899s.xn--skb116m; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̣ⷡ.⓾ڹ +B; xn--kta899s.xn--skb970ka771c; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ̣ⷡ.⓾ڹ +B; 𞠶ᠴ\u06DD。\u1074𞤵󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; 𞠶ᠴ\u06DD。\u1074𞤵󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; 𞠶ᠴ\u06DD。\u1074𞤓󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; xn--tlb199fwl35a.xn--yld4613v; [B1 B2 V5 V6]; [B1 B2 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; 𞠶ᠴ\u06DD。\u1074𞤓󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; 𑰺.-򑟏; [P1 V3 V5 V6]; [P1 V3 V5 V6] +B; xn--jk3d.xn----iz68g; [V3 V5 V6]; [V3 V5 V6] +B; 󠻩.赏; [P1 V6]; [P1 V6] +B; 󠻩.赏; [P1 V6]; [P1 V6] +B; xn--2856e.xn--6o3a; [V6]; [V6] +B; \u06B0ᠡ。Ⴁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ڰᠡ.Ⴁ +B; \u06B0ᠡ。Ⴁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ڰᠡ.Ⴁ +B; \u06B0ᠡ。ⴁ; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ +B; xn--jkb440g.xn--skj; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ +B; xn--jkb440g.xn--8md; [B2 B3 V6]; [B2 B3 V6] # ڰᠡ.Ⴁ +B; \u06B0ᠡ。ⴁ; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ +T; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- +N; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- +T; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- +N; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- +T; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +N; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +B; \u20DEႪ\u06BBΣ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- +B; \u20DEⴊ\u06BBσ。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- +B; \u20DEႪ\u06BBσ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- +B; xn--4xa33m7zmb0q.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ⃞Ⴊڻσ.- +B; xn--4xa33mr38aeel.-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- +B; xn--3xa53mr38aeel.-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +B; xn--3xa53m7zmb0q.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ⃞Ⴊڻς.- +T; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +N; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +B; \u20DEႪ\u06BBΣ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- +B; \u20DEⴊ\u06BBσ。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- +B; \u20DEႪ\u06BBσ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- +T; Ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # Ⴍ. +N; Ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍ. +T; Ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # Ⴍ. +N; Ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍ. 
+T; ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # ⴍ. +N; ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍ. +B; xn--4kj.xn--p01x; [V6]; [V6] +B; xn--4kj.xn--0ug56448b; [C1 V6]; [C1 V6] # ⴍ. +B; xn--lnd.xn--p01x; [V6]; [V6] +B; xn--lnd.xn--0ug56448b; [C1 V6]; [C1 V6] # Ⴍ. +T; ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # ⴍ. +N; ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍ. +B; 򉟂󠵣.𐫫\u1A60󴺖\u1B44; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # .𐫫᩠᭄ +B; xn--9u37blu98h.xn--jof13bt568cork1j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # .𐫫᩠᭄ +B; ≯❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] +B; ≯❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] +B; xn--i7e163ct2d.xn--vwj7372e; [B1 V6]; [B1 V6] +B; 􁕜𐹧𞭁𐹩。Ⴈ𐫮Ⴏ; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; 􁕜𐹧𞭁𐹩。ⴈ𐫮ⴏ; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; xn--fo0de1270ope54j.xn--zkjo0151o; [B5 B6 V6]; [B5 B6 V6] +B; xn--fo0de1270ope54j.xn--gndo2033q; [B5 B6 V6]; [B5 B6 V6] +B; 𞠂。\uA926; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𞠂.ꤦ +B; xn--145h.xn--ti9a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𞠂.ꤦ +B; 𝟔𐹫.\u0733\u10379ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ +B; 𝟔𐹫.\u1037\u07339ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ +B; 6𐹫.\u1037\u07339ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ +B; xn--6-t26i.xn--9-91c730e8u8n; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ +B; \u0724\u0603𞲶.\u06D8; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ܤ.ۘ +B; \u0724\u0603𞲶.\u06D8; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ܤ.ۘ +B; xn--lfb19ct414i.xn--olb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ܤ.ۘ +T; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 P1 V6] # ✆ꡋ.ز +N; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ✆ꡋ.ز +T; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 P1 V6] # ✆ꡋ.ز +N; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ✆ꡋ.ز +B; xn--1biv525bcix0d.xn--xgb6828v; [B1 V6]; [B1 V6] # ✆ꡋ.ز +B; xn--1biv525bcix0d.xn--xgb253k0m73a; [B1 C2 V6]; [B1 C2 V6] # ✆ꡋ.ز +B; \u0845񃾰𞸍-.≠򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 +B; \u0845񃾰𞸍-.=\u0338򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 +B; \u0845񃾰\u0646-.≠򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 +B; \u0845񃾰\u0646-.=\u0338򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 +B; xn----qoc64my971s.xn--1ch7585g76o3c; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ࡅن-.≠𑋪 +B; 𝟛.笠; 3.笠; 3.xn--6vz +B; 𝟛.笠; 3.笠; 3.xn--6vz +B; 3.笠; ; 3.xn--6vz +B; 3.xn--6vz; 3.笠; 3.xn--6vz +T; -\u200D.Ⴞ𐋷; [C2 P1 V3 V6]; [P1 V3 V6] # -.Ⴞ𐋷 +N; -\u200D.Ⴞ𐋷; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.Ⴞ𐋷 +T; -\u200D.ⴞ𐋷; [C2 V3]; [V3] # -.ⴞ𐋷 +N; -\u200D.ⴞ𐋷; [C2 V3]; [C2 V3] # -.ⴞ𐋷 +B; -.xn--mlj8559d; [V3]; [V3] +B; xn----ugn.xn--mlj8559d; [C2 V3]; [C2 V3] # -.ⴞ𐋷 +B; -.xn--2nd2315j; [V3 V6]; [V3 V6] +B; xn----ugn.xn--2nd2315j; [C2 V3 V6]; [C2 V3 V6] # -.Ⴞ𐋷 +T; \u200Dςß\u0731.\u0BCD; [C2 V5]; [V5] # ςßܱ.் +N; \u200Dςß\u0731.\u0BCD; [C2 V5]; [C2 V5] # ςßܱ.் +T; \u200Dςß\u0731.\u0BCD; [C2 V5]; [V5] # ςßܱ.் +N; \u200Dςß\u0731.\u0BCD; [C2 V5]; [C2 V5] # ςßܱ.் +T; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200Dσss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200Dσss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200DΣss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200DΣss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +B; xn--ss-ubc826a.xn--xmc; [V5]; [V5] # σssܱ.் +B; xn--ss-ubc826ab34b.xn--xmc; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200DΣß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் +N; \u200DΣß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் +T; \u200Dσß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் +N; \u200Dσß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் +B; xn--zca39lk1di19a.xn--xmc; [C2 V5]; [C2 V5] # σßܱ.் +B; xn--zca19ln1di19a.xn--xmc; [C2 V5]; [C2 V5] # ςßܱ.் 
+T; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200Dσss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200Dσss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200DΣss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200DΣss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200DΣß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் +N; \u200DΣß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் +T; \u200Dσß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் +N; \u200Dσß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் +T; ≠.\u200D; [C2 P1 V6]; [P1 V6] # ≠. +N; ≠.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. +T; =\u0338.\u200D; [C2 P1 V6]; [P1 V6] # ≠. +N; =\u0338.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. +T; ≠.\u200D; [C2 P1 V6]; [P1 V6] # ≠. +N; ≠.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. +T; =\u0338.\u200D; [C2 P1 V6]; [P1 V6] # ≠. +N; =\u0338.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. +B; xn--1ch.; [V6]; [V6] +B; xn--1ch.xn--1ug; [C2 V6]; [C2 V6] # ≠. +B; \uFC01。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ +B; \u0626\u062D。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ +B; \u064A\u0654\u062D。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ +B; xn--lgbo.xn--2rc021dcxkrx55t; [B1 V5 V6]; [B1 V5 V6] # ئح.ಁᠼ▗ +T; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 +N; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 +T; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 +N; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 +B; 󧋵\u09CDΣ.Σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +T; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +N; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +B; 󧋵\u09CDσ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +B; 󧋵\u09CDΣ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +B; xn--4xa502av8297a.xn--4xa6055k; [V6]; [V6] # ্σ.σ𐨿 +T; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +N; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +B; xn--4xa502av8297a.xn--3xa8055k; [V6]; [V6] # ্σ.ς𐨿 +B; xn--3xa702av8297a.xn--3xa8055k; [V6]; [V6] # ্ς.ς𐨿 +B; 󧋵\u09CDΣ.Σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +T; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +N; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +B; 󧋵\u09CDσ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +B; 󧋵\u09CDΣ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +T; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +N; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰Ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗႹ +B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰Ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗႹ +B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ +B; xn--zsb09cu46vjs6f.xn--gmf469fr883am5r1e; [B2 B3 V6]; [B2 B3 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ +B; xn--zsb09cu46vjs6f.xn--xnd909bv540bm5k9d; [B2 B3 V6]; [B2 B3 V6] # 𐫓ߘ牅ࣸ.ᨗႹ +B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ +B; 񣤒。륧; [P1 V6]; [P1 V6] +B; 񣤒。륧; [P1 V6]; [P1 V6] +B; 񣤒。륧; [P1 V6]; [P1 V6] +B; 񣤒。륧; [P1 V6]; [P1 V6] +B; xn--s264a.xn--pw2b; [V6]; [V6] +T; 𐹷\u200D。󉵢; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹷. +N; 𐹷\u200D。󉵢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹷. +B; xn--vo0d.xn--8088d; [B1 V6]; [B1 V6] +B; xn--1ugx205g.xn--8088d; [B1 C2 V6]; [B1 C2 V6] # 𐹷. 
+B; Ⴘ\u06C2𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- +B; Ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- +B; Ⴘ\u06C2𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- +B; Ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- +B; ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; ⴘ\u06C2𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; xn--1kb147qfk3n.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; xn--1kb312c139t.-; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # Ⴘۂ𑲭.- +B; ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; ⴘ\u06C2𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; \uA806\u067B₆ᡐ。🛇\uFCDD; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم +B; \uA806\u067B6ᡐ。🛇\u064A\u0645; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم +B; xn--6-rrc018krt9k.xn--hhbj61429a; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم +B; 򸍂.㇄ᡟ𐫂\u0622; [B1 P1 V6]; [B1 P1 V6] # .㇄ᡟ𐫂آ +B; 򸍂.㇄ᡟ𐫂\u0627\u0653; [B1 P1 V6]; [B1 P1 V6] # .㇄ᡟ𐫂آ +B; xn--p292d.xn--hgb154ghrsvm2r; [B1 V6]; [B1 V6] # .㇄ᡟ𐫂آ +B; \u07DF򵚌。-\u07E9; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ߟ.-ߩ +B; xn--6sb88139l.xn----pdd; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ߟ.-ߩ +T; ς\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # ςك襾.ᢟ⒈ +N; ς\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ςك襾.ᢟ⒈ +T; ς\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # ςك襾.ᢟ1. +N; ς\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # ςك襾.ᢟ1. +T; Σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # σك襾.ᢟ1. +N; Σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. +T; σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # σك襾.ᢟ1. +N; σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. +B; xn--4xa49jux8r.xn--1-4ck.; [B5]; [B5] # σك襾.ᢟ1. +B; xn--4xa49jux8r.xn--1-4ck691bba.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. +B; xn--3xa69jux8r.xn--1-4ck691bba.; [B1 B5 C1]; [B1 B5 C1] # ςك襾.ᢟ1. 
+T; Σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # σك襾.ᢟ⒈ +N; Σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # σك襾.ᢟ⒈ +T; σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # σك襾.ᢟ⒈ +N; σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # σك襾.ᢟ⒈ +B; xn--4xa49jux8r.xn--pbf212d; [B5 V6]; [B5 V6] # σك襾.ᢟ⒈ +B; xn--4xa49jux8r.xn--pbf519aba607b; [B1 B5 C1 V6]; [B1 B5 C1 V6] # σك襾.ᢟ⒈ +B; xn--3xa69jux8r.xn--pbf519aba607b; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ςك襾.ᢟ⒈ +B; ᡆ𑓝.𞵆; [P1 V6]; [P1 V6] +B; ᡆ𑓝.𞵆; [P1 V6]; [P1 V6] +B; xn--57e0440k.xn--k86h; [V6]; [V6] +T; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ੍𦍓ᷮ.ࢽ +N; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ੍𦍓ᷮ.ࢽ +T; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ੍𦍓ᷮ.ࢽ +N; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ੍𦍓ᷮ.ࢽ +B; xn--ybc461hph93b.xn--jzb29857e; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ੍𦍓ᷮ.ࢽ +B; xn--ybc461hph93b.xn--jzb740j1y45h; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ੍𦍓ᷮ.ࢽ +T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 +N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 +T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 +N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 +T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 +N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 +T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 +N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 +B; xn----dnc06f42153a.xn--v22b; [B2 B3 V3 V6]; [B2 B3 V3 V6] # خ݈-.먿 +B; xn----dnc06f42153a.xn--0ug1581d; [B1 B2 B3 C1 V3 V6]; [B1 B2 B3 C1 V3 V6] # خ݈-.먿 +B; 􋿦。ᠽ; [P1 V6]; [P1 V6] +B; 􋿦。ᠽ; [P1 V6]; [P1 V6] +B; xn--j890g.xn--w7e; [V6]; [V6] +T; 嬃𝍌.\u200D\u0B44; [C2]; [V5] # 嬃𝍌.ୄ +N; 嬃𝍌.\u200D\u0B44; [C2]; [C2] # 嬃𝍌.ୄ +T; 嬃𝍌.\u200D\u0B44; [C2]; [V5] # 嬃𝍌.ୄ +N; 嬃𝍌.\u200D\u0B44; [C2]; [C2] # 嬃𝍌.ୄ +B; xn--b6s0078f.xn--0ic; [V5]; [V5] # 嬃𝍌.ୄ +B; xn--b6s0078f.xn--0ic557h; [C2]; [C2] # 嬃𝍌.ୄ +B; \u0602𝌪≯.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. +B; \u0602𝌪>\u0338.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. +B; \u0602𝌪≯.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. +B; \u0602𝌪>\u0338.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. +B; xn--kfb866llx01a.xn--wp1gm3570b; [B1 V6]; [B1 V6] # 𝌪≯. +B; 򫾥\u08B7\u17CC\uA9C0.𞼠; [B5 P1 V6]; [B5 P1 V6] # ࢷ៌꧀. +B; xn--dzb638ewm4i1iy1h.xn--3m7h; [B5 V6]; [B5 V6] # ࢷ៌꧀. +T; \u200C.񟛤; [C1 P1 V6]; [P1 V6 A4_2] # . +N; \u200C.񟛤; [C1 P1 V6]; [C1 P1 V6] # . +B; .xn--q823a; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--q823a; [C1 V6]; [C1 V6] # . 
+B; 򺛕Ⴃ䠅.𐸑; [P1 V6]; [P1 V6] +B; 򺛕Ⴃ䠅.𐸑; [P1 V6]; [P1 V6] +B; 򺛕ⴃ䠅.𐸑; [P1 V6]; [P1 V6] +B; xn--ukju77frl47r.xn--yl0d; [V6]; [V6] +B; xn--bnd074zr557n.xn--yl0d; [V6]; [V6] +B; 򺛕ⴃ䠅.𐸑; [P1 V6]; [P1 V6] +B; \u1BF1𐹳𐹵𞤚。𝟨Ⴅ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ +B; \u1BF1𐹳𐹵𞤚。6Ⴅ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ +B; \u1BF1𐹳𐹵𞤼。6ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; xn--zzfy954hga2415t.xn--6-kvs; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; xn--zzfy954hga2415t.xn--6-h0g; [B1 V5 V6]; [B1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ +B; \u1BF1𐹳𐹵𞤼。𝟨ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; \u1BF1𐹳𐹵𞤚。6ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; \u1BF1𐹳𐹵𞤚。𝟨ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; -。︒; [P1 V3 V6]; [P1 V3 V6] +B; -。。; [V3 A4_2]; [V3 A4_2] +B; -..; [V3 A4_2]; [V3 A4_2] +B; -.xn--y86c; [V3 V6]; [V3 V6] +B; \u07DBჀ。-⁵--; [B1 B2 B3 P1 V2 V3 V6]; [B1 B2 B3 P1 V2 V3 V6] # ߛჀ.-5-- +B; \u07DBჀ。-5--; [B1 B2 B3 P1 V2 V3 V6]; [B1 B2 B3 P1 V2 V3 V6] # ߛჀ.-5-- +B; \u07DBⴠ。-5--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- +B; xn--2sb691q.-5--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- +B; xn--2sb866b.-5--; [B1 B2 B3 V2 V3 V6]; [B1 B2 B3 V2 V3 V6] # ߛჀ.-5-- +B; \u07DBⴠ。-⁵--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- +B; ≯\uD8DD󠑕。𐹷𐹻≯𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕。𐹷𐹻>\u0338𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; ≯\uD8DD󠑕。𐹷𐹻≯𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕。𐹷𐹻>\u0338𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; ≯\uD8DD󠑕.xn--hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕.xn--hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕.XN--HDH8283GDOAQA; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; ≯\uD8DD󠑕.XN--HDH8283GDOAQA; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; ≯\uD8DD󠑕.Xn--Hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕.Xn--Hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +T; ㍔\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ +N; ㍔\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ +T; ルーブル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ +N; ルーブル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ +T; ルーフ\u3099ル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ +N; ルーフ\u3099ル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ +B; xn--dqb73el09fncab4h.xn--kua81ls548d3608b; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ルーブルࣦݼ.͆ +B; xn--dqb73ec22c9kp8cb1j.xn--kua81ls548d3608b; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # ルーブルࣦݼ.͆ +T; \u200D.F; [C2]; [A4_2] # .f +N; \u200D.F; [C2]; [C2] # .f +T; \u200D.f; [C2]; [A4_2] # .f +N; \u200D.f; [C2]; [C2] # .f +B; .f; [A4_2]; [A4_2] +B; xn--1ug.f; [C2]; [C2] # .f +B; f; ; +T; \u200D㨲。ß; [C2]; xn--9bm.ss # 㨲.ß +N; \u200D㨲。ß; [C2]; [C2] # 㨲.ß +T; \u200D㨲。ß; [C2]; xn--9bm.ss # 㨲.ß +N; \u200D㨲。ß; [C2]; [C2] # 㨲.ß +T; \u200D㨲。SS; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。SS; [C2]; [C2] # 㨲.ss +T; \u200D㨲。ss; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。ss; [C2]; [C2] # 㨲.ss +T; \u200D㨲。Ss; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。Ss; [C2]; [C2] # 㨲.ss +B; xn--9bm.ss; 㨲.ss; xn--9bm.ss +B; 㨲.ss; ; xn--9bm.ss +B; 㨲.SS; 㨲.ss; xn--9bm.ss +B; 㨲.Ss; 㨲.ss; xn--9bm.ss +B; xn--1ug914h.ss; [C2]; [C2] # 㨲.ss +B; xn--1ug914h.xn--zca; [C2]; [C2] # 㨲.ß +T; \u200D㨲。SS; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。SS; [C2]; [C2] # 㨲.ss +T; \u200D㨲。ss; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。ss; [C2]; [C2] # 㨲.ss +T; \u200D㨲。Ss; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。Ss; 
[C2]; [C2] # 㨲.ss +B; \u0605\u067E。\u08A8; [B1 P1 V6]; [B1 P1 V6] # پ.ࢨ +B; \u0605\u067E。\u08A8; [B1 P1 V6]; [B1 P1 V6] # پ.ࢨ +B; xn--nfb6v.xn--xyb; [B1 V6]; [B1 V6] # پ.ࢨ +B; ⾑\u0753𞤁。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +B; 襾\u0753𞤁。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +B; 襾\u0753𞤣。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +B; xn--6ob9577deqwl.xn--7ib5526k; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +B; ⾑\u0753𞤣。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +T; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ +N; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ +T; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ +N; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ +B; 񦴻Σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ +B; 񦴻σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ +B; xn----zmb705tuo34l.xn----53c4874j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # σ-⃫.ݔ-ꡛ +B; xn----xmb015tuo34l.xn----53c4874j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # ς-⃫.ݔ-ꡛ +B; 񦴻Σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ +B; 񦴻σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ +T; \u200D.􀸨; [C2 P1 V6]; [P1 V6 A4_2] # . +N; \u200D.􀸨; [C2 P1 V6]; [C2 P1 V6] # . +T; \u200D.􀸨; [C2 P1 V6]; [P1 V6 A4_2] # . +N; \u200D.􀸨; [C2 P1 V6]; [C2 P1 V6] # . +B; .xn--h327f; [V6 A4_2]; [V6 A4_2] +B; xn--1ug.xn--h327f; [C2 V6]; [C2 V6] # . +B; 񣭻񌥁。≠𝟲; [P1 V6]; [P1 V6] +B; 񣭻񌥁。=\u0338𝟲; [P1 V6]; [P1 V6] +B; 񣭻񌥁。≠6; [P1 V6]; [P1 V6] +B; 񣭻񌥁。=\u03386; [P1 V6]; [P1 V6] +B; xn--h79w4z99a.xn--6-tfo; [V6]; [V6] +T; 󠅊ᡭ\u200D.𐥡; [B6 C2 P1 V6]; [P1 V6] # ᡭ. +N; 󠅊ᡭ\u200D.𐥡; [B6 C2 P1 V6]; [B6 C2 P1 V6] # ᡭ. +B; xn--98e.xn--om9c; [V6]; [V6] +B; xn--98e810b.xn--om9c; [B6 C2 V6]; [B6 C2 V6] # ᡭ. +B; \u0C40\u0855𐥛𑄴.󭰵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ీࡕ𑄴. +B; \u0C40\u0855𐥛𑄴.󭰵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ీࡕ𑄴. +B; xn--kwb91r5112avtg.xn--o580f; [B1 V5 V6]; [B1 V5 V6] # ీࡕ𑄴. +T; 𞤮。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ +N; 𞤮。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ +T; 𞤮。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ +N; 𞤮。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ +T; 𞤌。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ +N; 𞤌。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ +T; 𞤌。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ +N; 𞤌。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ +B; xn--me6h.xn--z6fz8ueq2v; [B1 V5 V6]; [B1 V5 V6] # 𞤮.𑇊≯᳦ +B; xn--me6h.xn--z6f16kn9b2642b; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𞤮.𑇊≯᳦ +B; 󠄀𝟕.𞤌񛗓Ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 󠄀7.𞤌񛗓Ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 󠄀7.𞤮񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 7.xn--0kjz523lv1vv; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; 7.xn--hnd3403vv1vv; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; 󠄀𝟕.𞤮񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 󠄀7.𞤌񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 󠄀𝟕.𞤌񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 閃9𝩍。Ↄ\u0669\u08B1\u0B4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 閃9𝩍.Ↄ٩ࢱ୍ +B; 閃9𝩍。ↄ\u0669\u08B1\u0B4D; [B5 B6]; [B5 B6] # 閃9𝩍.ↄ٩ࢱ୍ +B; xn--9-3j6dk517f.xn--iib28ij3c4t9a; [B5 B6]; [B5 B6] # 閃9𝩍.ↄ٩ࢱ୍ +B; xn--9-3j6dk517f.xn--iib28ij3c0t9a; [B5 B6 V6]; [B5 B6 V6] # 閃9𝩍.Ↄ٩ࢱ୍ +B; \uAAF6ᢏ\u0E3A2.𐋢\u0745\u0F9F︒; [P1 V5 V6]; [P1 V5 V6] # ꫶ᢏฺ2.𐋢݅ྟ︒ +B; \uAAF6ᢏ\u0E3A2.𐋢\u0745\u0F9F。; [V5]; [V5] # ꫶ᢏฺ2.𐋢݅ྟ. +B; xn--2-2zf840fk16m.xn--sob093b2m7s.; [V5]; [V5] # ꫶ᢏฺ2.𐋢݅ྟ. 
+B; xn--2-2zf840fk16m.xn--sob093bj62sz9d; [V5 V6]; [V5 V6] # ꫶ᢏฺ2.𐋢݅ྟ︒ +B; 󅴧。≠-󠙄⾛; [P1 V6]; [P1 V6] +B; 󅴧。=\u0338-󠙄⾛; [P1 V6]; [P1 V6] +B; 󅴧。≠-󠙄走; [P1 V6]; [P1 V6] +B; 󅴧。=\u0338-󠙄走; [P1 V6]; [P1 V6] +B; xn--gm57d.xn----tfo4949b3664m; [V6]; [V6] +B; \u076E\u0604Ⴊ。-≠\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮႪ.-≠ +B; \u076E\u0604Ⴊ。-=\u0338\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮႪ.-≠ +B; \u076E\u0604ⴊ。-=\u0338\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮⴊ.-≠ +B; \u076E\u0604ⴊ。-≠\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮⴊ.-≠ +B; xn--mfb73ek93f.xn----5bh589i; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ݮⴊ.-≠ +B; xn--mfb73ex6r.xn----5bh589i; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ݮႪ.-≠ +T; \uFB4F𐹧𝟒≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. +N; \uFB4F𐹧𝟒≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. +T; \uFB4F𐹧𝟒>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. +N; \uFB4F𐹧𝟒>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. +T; \u05D0\u05DC𐹧4≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. +N; \u05D0\u05DC𐹧4≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. +T; \u05D0\u05DC𐹧4>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. +N; \u05D0\u05DC𐹧4>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. +B; xn--4-zhc0by36txt0w.; [B3 B4 V6]; [B3 B4 V6] # אל𐹧4≯. +B; xn--4-zhc0by36txt0w.xn--0ug; [B1 B3 B4 C1 V6]; [B1 B3 B4 C1 V6] # אל𐹧4≯. +B; 𝟎。甯; 0.甯; 0.xn--qny +B; 0。甯; 0.甯; 0.xn--qny +B; 0.xn--qny; 0.甯; 0.xn--qny +B; 0.甯; ; 0.xn--qny +B; -⾆.\uAAF6; [V3 V5]; [V3 V5] # -舌.꫶ +B; -舌.\uAAF6; [V3 V5]; [V3 V5] # -舌.꫶ +B; xn----ef8c.xn--2v9a; [V3 V5]; [V3 V5] # -舌.꫶ +B; -。ᢘ; [V3]; [V3] +B; -。ᢘ; [V3]; [V3] +B; -.xn--ibf; [V3]; [V3] +B; 🂴Ⴋ.≮; [P1 V6]; [P1 V6] +B; 🂴Ⴋ.<\u0338; [P1 V6]; [P1 V6] +B; 🂴ⴋ.<\u0338; [P1 V6]; [P1 V6] +B; 🂴ⴋ.≮; [P1 V6]; [P1 V6] +B; xn--2kj7565l.xn--gdh; [V6]; [V6] +B; xn--jnd1986v.xn--gdh; [V6]; [V6] +T; 璼𝨭。\u200C󠇟; [C1]; xn--gky8837e. # 璼𝨭. +N; 璼𝨭。\u200C󠇟; [C1]; [C1] # 璼𝨭. +T; 璼𝨭。\u200C󠇟; [C1]; xn--gky8837e. # 璼𝨭. +N; 璼𝨭。\u200C󠇟; [C1]; [C1] # 璼𝨭. +B; xn--gky8837e.; 璼𝨭.; xn--gky8837e. +B; 璼𝨭.; ; xn--gky8837e. +B; xn--gky8837e.xn--0ug; [C1]; [C1] # 璼𝨭. +B; \u06698񂍽。-5🞥; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٩8.-5🞥 +B; \u06698񂍽。-5🞥; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٩8.-5🞥 +B; xn--8-qqc97891f.xn---5-rp92a; [B1 V3 V6]; [B1 V3 V6] # ٩8.-5🞥 +T; \u200C.\u200C; [C1]; [A4_2] # . +N; \u200C.\u200C; [C1]; [C1] # . +B; xn--0ug.xn--0ug; [C1]; [C1] # . 
+T; \u200D튛.\u0716; [B1 C2]; xn--157b.xn--gnb # 튛.ܖ +N; \u200D튛.\u0716; [B1 C2]; [B1 C2] # 튛.ܖ +T; \u200D튛.\u0716; [B1 C2]; xn--157b.xn--gnb # 튛.ܖ +N; \u200D튛.\u0716; [B1 C2]; [B1 C2] # 튛.ܖ +B; xn--157b.xn--gnb; 튛.\u0716; xn--157b.xn--gnb # 튛.ܖ +B; 튛.\u0716; ; xn--157b.xn--gnb # 튛.ܖ +B; 튛.\u0716; 튛.\u0716; xn--157b.xn--gnb # 튛.ܖ +B; xn--1ug4441e.xn--gnb; [B1 C2]; [B1 C2] # 튛.ܖ +B; ᡋ𐹰𞽳.\u0779ⴞ; [B2 B3 B5 B6 P1 V6]; [B2 B3 B5 B6 P1 V6] # ᡋ𐹰.ݹⴞ +B; ᡋ𐹰𞽳.\u0779Ⴞ; [B2 B3 B5 B6 P1 V6]; [B2 B3 B5 B6 P1 V6] # ᡋ𐹰.ݹႾ +B; xn--b8e0417jocvf.xn--9pb068b; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ᡋ𐹰.ݹႾ +B; xn--b8e0417jocvf.xn--9pb883q; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ᡋ𐹰.ݹⴞ +B; 𐷃\u0662𝅻𝟧.𐹮𐹬Ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬Ⴇ +B; 𐷃\u0662𝅻5.𐹮𐹬Ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬Ⴇ +B; 𐷃\u0662𝅻5.𐹮𐹬ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬ⴇ +B; xn--5-cqc8833rhv7f.xn--ykjz523efa; [B1 B4 V6]; [B1 B4 V6] # ٢𝅻5.𐹮𐹬ⴇ +B; xn--5-cqc8833rhv7f.xn--fnd3401kfa; [B1 B4 V6]; [B1 B4 V6] # ٢𝅻5.𐹮𐹬Ⴇ +B; 𐷃\u0662𝅻𝟧.𐹮𐹬ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬ⴇ +B; Ⴗ.\u05C2𑄴\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ +B; Ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ +B; Ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ +B; ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ +B; xn--flj.xn--qdb0605f14ycrms3c; [V5 V6]; [V5 V6] # ⴗ.𑄴ׂꦷ +B; xn--vnd.xn--qdb0605f14ycrms3c; [V5 V6]; [V5 V6] # Ⴗ.𑄴ׂꦷ +B; ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ +B; ⴗ.\u05C2𑄴\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ +B; 𝟾𾤘.򇕛\u066C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 8.٬ +B; 8𾤘.򇕛\u066C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 8.٬ +B; xn--8-kh23b.xn--lib78461i; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 8.٬ +B; ⒈酫︒。\u08D6; [P1 V5 V6]; [P1 V5 V6] # ⒈酫︒.ࣖ +B; 1.酫。。\u08D6; [V5 A4_2]; [V5 A4_2] # 1.酫..ࣖ +B; 1.xn--8j4a..xn--8zb; [V5 A4_2]; [V5 A4_2] # 1.酫..ࣖ +B; xn--tsh4490bfe8c.xn--8zb; [V5 V6]; [V5 V6] # ⒈酫︒.ࣖ +T; \u2DE3\u200C≮\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [P1 V5 V6] # ⷣ≮ᩫ.ฺ +N; \u2DE3\u200C≮\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⷣ≮ᩫ.ฺ +T; \u2DE3\u200C<\u0338\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [P1 V5 V6] # ⷣ≮ᩫ.ฺ +N; \u2DE3\u200C<\u0338\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⷣ≮ᩫ.ฺ +B; xn--uof548an0j.xn--o4c; [V5 V6]; [V5 V6] # ⷣ≮ᩫ.ฺ +B; xn--uof63xk4bf3s.xn--o4c732g; [C1 V5 V6]; [C1 V5 V6] # ⷣ≮ᩫ.ฺ +T; 𞪂。ႷႽ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .ႷႽ1 +N; 𞪂。ႷႽ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ႷႽ1 +T; 𞪂。ႷႽ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .ႷႽ1 +N; 𞪂。ႷႽ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ႷႽ1 +T; 𞪂。ⴗⴝ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .ⴗⴝ1 +N; 𞪂。ⴗⴝ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ⴗⴝ1 +T; 𞪂。Ⴗⴝ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .Ⴗⴝ1 +N; 𞪂。Ⴗⴝ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .Ⴗⴝ1 +B; xn--co6h.xn--1-h1g429s; [V6]; [V6] +B; xn--co6h.xn--1-h1g398iewm; [B6 C2 V6]; [B6 C2 V6] # .Ⴗⴝ1 +B; xn--co6h.xn--1-kwssa; [V6]; [V6] +B; xn--co6h.xn--1-ugn710dya; [B6 C2 V6]; [B6 C2 V6] # .ⴗⴝ1 +B; xn--co6h.xn--1-h1gs; [V6]; [V6] +B; xn--co6h.xn--1-h1gs597m; [B6 C2 V6]; [B6 C2 V6] # .ႷႽ1 +T; 𞪂。ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .ⴗⴝ1 +N; 𞪂。ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ⴗⴝ1 +T; 𞪂。Ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .Ⴗⴝ1 +N; 𞪂。Ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .Ⴗⴝ1 +B; 𑄴𑄳2.𞳿󠀳-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; xn--2-h87ic.xn----s39r33498d; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] +B; 󠕲󟶶\u0665。񀁁𑄳𞤃\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ +B; 󠕲󟶶\u0665。񀁁𑄳𞤃\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ +B; 󠕲󟶶\u0665。񀁁𑄳𞤥\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ +B; xn--eib57614py3ea.xn--9mb5737kqnpfzkwr; [B1 B5 
B6 V6]; [B1 B5 B6 V6] # ٥.𑄳𞤥ܐ +B; 󠕲󟶶\u0665。񀁁𑄳𞤥\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ +T; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 +N; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 +T; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 +N; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 +T; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +N; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +T; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +N; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +B; xn--qnb616fis0qzt36f.xn--4xa847hli46a; [B2 B6 V6]; [B2 B6 V6] # ܠ𐹢ុ.σᢈ🝭 +B; xn--qnb616fis0qzt36f.xn--4xa847h6ofgl44c; [B2 B6 C1 V6]; [B2 B6 C1 V6] # ܠ𐹢ុ.σᢈ🝭 +B; xn--qnb616fis0qzt36f.xn--3xa057h6ofgl44c; [B2 B6 C1 V6]; [B2 B6 C1 V6] # ܠ𐹢ុ.ςᢈ🝭 +T; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +N; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +T; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +N; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +T; \u200D--≮。𐹧; [B1 C2 P1 V6]; [B1 P1 V3 V6] # --≮.𐹧 +N; \u200D--≮。𐹧; [B1 C2 P1 V6]; [B1 C2 P1 V6] # --≮.𐹧 +T; \u200D--<\u0338。𐹧; [B1 C2 P1 V6]; [B1 P1 V3 V6] # --≮.𐹧 +N; \u200D--<\u0338。𐹧; [B1 C2 P1 V6]; [B1 C2 P1 V6] # --≮.𐹧 +B; xn-----ujv.xn--fo0d; [B1 V3 V6]; [B1 V3 V6] +B; xn-----l1tz1k.xn--fo0d; [B1 C2 V6]; [B1 C2 V6] # --≮.𐹧 +B; \uA806。𻚏\u0FB0⒕; [P1 V5 V6]; [P1 V5 V6] # ꠆.ྰ⒕ +B; \uA806。𻚏\u0FB014.; [P1 V5 V6]; [P1 V5 V6] # ꠆.ྰ14. +B; xn--l98a.xn--14-jsj57880f.; [V5 V6]; [V5 V6] # ꠆.ྰ14. +B; xn--l98a.xn--dgd218hhp28d; [V5 V6]; [V5 V6] # ꠆.ྰ⒕ +B; 򮉂\u06BC.𑆺\u0669; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڼ.𑆺٩ +B; 򮉂\u06BC.𑆺\u0669; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڼ.𑆺٩ +B; xn--vkb92243l.xn--iib9797k; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ڼ.𑆺٩ +B; 󠁎\u06D0-。𞤴; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ې-.𞤴 +B; 󠁎\u06D0-。𞤒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ې-.𞤴 +B; xn----mwc72685y.xn--se6h; [B1 V3 V6]; [B1 V3 V6] # ې-.𞤴 +T; 𝟠4󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; xn--84-s850a.xn--59h6326e # 84𝈻.𐋵⛧ +N; 𝟠4󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; [C2] # 84𝈻.𐋵⛧ +T; 84󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; xn--84-s850a.xn--59h6326e # 84𝈻.𐋵⛧ +N; 84󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; [C2] # 84𝈻.𐋵⛧ +B; xn--84-s850a.xn--59h6326e; 84𝈻.𐋵⛧; xn--84-s850a.xn--59h6326e; NV8 +B; 84𝈻.𐋵⛧; ; xn--84-s850a.xn--59h6326e; NV8 +B; xn--84-s850a.xn--1uga573cfq1w; [C2]; [C2] # 84𝈻.𐋵⛧ +B; -\u0601。ᡪ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.ᡪ +B; -\u0601。ᡪ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.ᡪ +B; xn----tkc.xn--68e; [B1 V3 V6]; [B1 V3 V6] # -.ᡪ +T; ≮𝟕.謖ß≯; [P1 V6]; [P1 V6] +N; ≮𝟕.謖ß≯; [P1 V6]; [P1 V6] +T; <\u0338𝟕.謖ß>\u0338; [P1 V6]; [P1 V6] +N; <\u0338𝟕.謖ß>\u0338; [P1 V6]; [P1 V6] +T; ≮7.謖ß≯; [P1 V6]; [P1 V6] +N; ≮7.謖ß≯; [P1 V6]; [P1 V6] +T; <\u03387.謖ß>\u0338; [P1 V6]; [P1 V6] +N; <\u03387.謖ß>\u0338; [P1 V6]; [P1 V6] +B; <\u03387.謖SS>\u0338; [P1 V6]; [P1 V6] +B; ≮7.謖SS≯; [P1 V6]; [P1 V6] +B; ≮7.謖ss≯; [P1 V6]; [P1 V6] +B; <\u03387.謖ss>\u0338; [P1 V6]; [P1 V6] +B; <\u03387.謖Ss>\u0338; [P1 V6]; [P1 V6] +B; ≮7.謖Ss≯; [P1 V6]; [P1 V6] +B; xn--7-mgo.xn--ss-xjvv174c; [V6]; [V6] +B; xn--7-mgo.xn--zca892oly5e; [V6]; [V6] +B; <\u0338𝟕.謖SS>\u0338; [P1 V6]; [P1 V6] +B; ≮𝟕.謖SS≯; [P1 V6]; [P1 V6] +B; ≮𝟕.謖ss≯; [P1 V6]; [P1 V6] +B; <\u0338𝟕.謖ss>\u0338; [P1 V6]; [P1 V6] +B; <\u0338𝟕.謖Ss>\u0338; [P1 V6]; [P1 V6] +B; ≮𝟕.謖Ss≯; [P1 V6]; [P1 V6] +B; 朶Ⴉ𞪡.𝨽\u0825📻-; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 
V5 V6] # 朶Ⴉ.𝨽ࠥ📻- +B; 朶ⴉ𞪡.𝨽\u0825📻-; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 V5 V6] # 朶ⴉ.𝨽ࠥ📻- +B; xn--0kjz47pd57t.xn----3gd37096apmwa; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 朶ⴉ.𝨽ࠥ📻- +B; xn--hnd7245bd56p.xn----3gd37096apmwa; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 朶Ⴉ.𝨽ࠥ📻- +T; 𐤎。󑿰\u200C≮\u200D; [B6 C1 C2 P1 V6]; [B6 P1 V6] # 𐤎.≮ +N; 𐤎。󑿰\u200C≮\u200D; [B6 C1 C2 P1 V6]; [B6 C1 C2 P1 V6] # 𐤎.≮ +T; 𐤎。󑿰\u200C<\u0338\u200D; [B6 C1 C2 P1 V6]; [B6 P1 V6] # 𐤎.≮ +N; 𐤎。󑿰\u200C<\u0338\u200D; [B6 C1 C2 P1 V6]; [B6 C1 C2 P1 V6] # 𐤎.≮ +B; xn--bk9c.xn--gdhx6802k; [B6 V6]; [B6 V6] +B; xn--bk9c.xn--0ugc04p2u638c; [B6 C1 C2 V6]; [B6 C1 C2 V6] # 𐤎.≮ +T; 񭜎⒈。\u200C𝟤; [C1 P1 V6]; [P1 V6] # ⒈.2 +N; 񭜎⒈。\u200C𝟤; [C1 P1 V6]; [C1 P1 V6] # ⒈.2 +T; 񭜎1.。\u200C2; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1..2 +N; 񭜎1.。\u200C2; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1..2 +B; xn--1-ex54e..2; [V6 A4_2]; [V6 A4_2] +B; xn--1-ex54e..xn--2-rgn; [C1 V6 A4_2]; [C1 V6 A4_2] # 1..2 +B; xn--tsh94183d.2; [V6]; [V6] +B; xn--tsh94183d.xn--2-rgn; [C1 V6]; [C1 V6] # ⒈.2 +T; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹤.𐹳𐹶 +N; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹤.𐹳𐹶 +T; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹤.𐹳𐹶 +N; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹤.𐹳𐹶 +B; xn--co0d98977c.xn--ro0dga22807v; [B1 V6]; [B1 V6] +B; xn--1ugy994g7k93g.xn--ro0dga22807v; [B1 C2 V6]; [B1 C2 V6] # 𐹤.𐹳𐹶 +B; 𞤴𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; 𞤴𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; 𞤒𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; xn--609c96c09grp2w.xn--n3b28708s; [B1 V5 V6]; [B1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; 𞤒𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; \u0668。𐹠𐹽񗮶; [B1 P1 V6]; [B1 P1 V6] # ٨.𐹠𐹽 +B; \u0668。𐹠𐹽񗮶; [B1 P1 V6]; [B1 P1 V6] # ٨.𐹠𐹽 +B; xn--hib.xn--7n0d2bu9196b; [B1 V6]; [B1 V6] # ٨.𐹠𐹽 +B; \u1160񍀜.8򶾵\u069C; [B1 P1 V6]; [B1 P1 V6] # .8ڜ +B; xn--psd85033d.xn--8-otc61545t; [B1 V6]; [B1 V6] # .8ڜ +T; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [A4_2] # .ß𑓃 +N; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [C1 C2] # .ß𑓃 +T; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [A4_2] # .ß𑓃 +N; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [C1 C2] # .ß𑓃 +T; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +T; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +T; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +B; .xn--ss-bh7o; [A4_2]; [A4_2] +B; xn--0ugb.xn--ss-bh7o; [C1 C2]; [C1 C2] # .ss𑓃 +B; xn--0ugb.xn--zca0732l; [C1 C2]; [C1 C2] # .ß𑓃 +T; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +T; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +T; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +B; xn--ss-bh7o; ss𑓃; xn--ss-bh7o +B; ss𑓃; ; xn--ss-bh7o +B; SS𑓃; ss𑓃; xn--ss-bh7o +B; Ss𑓃; ss𑓃; xn--ss-bh7o +T; ︒\u200Cヶ䒩.ꡪ; [C1 P1 V6]; [P1 V6] # ︒ヶ䒩.ꡪ +N; ︒\u200Cヶ䒩.ꡪ; [C1 P1 V6]; [C1 P1 V6] # ︒ヶ䒩.ꡪ +T; 。\u200Cヶ䒩.ꡪ; [C1 A4_2]; [A4_2] # .ヶ䒩.ꡪ +N; 。\u200Cヶ䒩.ꡪ; [C1 A4_2]; [C1 A4_2] # .ヶ䒩.ꡪ +B; .xn--qekw60d.xn--gd9a; [A4_2]; [A4_2] +B; .xn--0ug287dj0o.xn--gd9a; [C1 A4_2]; [C1 A4_2] # .ヶ䒩.ꡪ +B; xn--qekw60dns9k.xn--gd9a; [V6]; [V6] +B; xn--0ug287dj0or48o.xn--gd9a; [C1 V6]; [C1 V6] # ︒ヶ䒩.ꡪ +B; xn--qekw60d.xn--gd9a; ヶ䒩.ꡪ; xn--qekw60d.xn--gd9a +B; ヶ䒩.ꡪ; ; xn--qekw60d.xn--gd9a +T; \u200C⒈𤮍.󢓋\u1A60; [C1 P1 V6]; [P1 V6] # ⒈𤮍.᩠ +N; \u200C⒈𤮍.󢓋\u1A60; [C1 P1 V6]; [C1 P1 V6] # ⒈𤮍.᩠ +T; \u200C1.𤮍.󢓋\u1A60; [C1 P1 V6]; [P1 V6] # 1.𤮍.᩠ +N; \u200C1.𤮍.󢓋\u1A60; 
[C1 P1 V6]; [C1 P1 V6] # 1.𤮍.᩠ +B; 1.xn--4x6j.xn--jof45148n; [V6]; [V6] # 1.𤮍.᩠ +B; xn--1-rgn.xn--4x6j.xn--jof45148n; [C1 V6]; [C1 V6] # 1.𤮍.᩠ +B; xn--tshw462r.xn--jof45148n; [V6]; [V6] # ⒈𤮍.᩠ +B; xn--0ug88o7471d.xn--jof45148n; [C1 V6]; [C1 V6] # ⒈𤮍.᩠ +T; ⒈\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ⒈𐫓.᩠ +N; ⒈\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 C1 C2 P1 V5 V6] # ⒈𐫓.᩠ +T; 1.\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 B3 P1 V5 V6] # 1.𐫓.᩠ +N; 1.\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 C1 C2 P1 V5 V6] # 1.𐫓.᩠ +B; 1.xn--8w9c40377c.xn--jofz5294e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 1.𐫓.᩠ +B; 1.xn--0ug8853gk263g.xn--jof95xex98m; [B1 C1 C2 V5 V6]; [B1 C1 C2 V5 V6] # 1.𐫓.᩠ +B; xn--tsh4435fk263g.xn--jofz5294e; [B1 V5 V6]; [B1 V5 V6] # ⒈𐫓.᩠ +B; xn--0ug78ol75wzcx4i.xn--jof95xex98m; [B1 C1 C2 V5 V6]; [B1 C1 C2 V5 V6] # ⒈𐫓.᩠ +B; 𝅵。𝟫𞀈䬺⒈; [P1 V6]; [P1 V6] +B; 𝅵。9𞀈䬺1.; [P1 V6]; [P1 V6] +B; xn--3f1h.xn--91-030c1650n.; [V6]; [V6] +B; xn--3f1h.xn--9-ecp936non25a; [V6]; [V6] +B; 򡼺≯。盚\u0635; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ≯.盚ص +B; 򡼺>\u0338。盚\u0635; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ≯.盚ص +B; xn--hdh30181h.xn--0gb7878c; [B5 B6 V6]; [B5 B6 V6] # ≯.盚ص +B; -񿰭\u05B4。-󠁊𐢸≯; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ִ.-≯ +B; -񿰭\u05B4。-󠁊𐢸>\u0338; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ִ.-≯ +B; xn----fgc06667m.xn----pgoy615he5y4i; [B1 V3 V6]; [B1 V3 V6] # -ִ.-≯ +T; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 P1 V6] # ᭄੍.𐭛 +N; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ᭄੍.𐭛 +T; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 P1 V6] # ᭄੍.𐭛 +N; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ᭄੍.𐭛 +B; xn--ybc997fb5881a.xn--409c6100y; [B2 B3 V6]; [B2 B3 V6] # ᭄੍.𐭛 +B; xn--ybc997f6rd2n772c.xn--409c6100y; [B2 B3 B6 V6]; [B2 B3 B6 V6] # ᭄੍.𐭛 +T; ⾇.\u067D𞤴\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +N; ⾇.\u067D𞤴\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +T; 舛.\u067D𞤴\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +N; 舛.\u067D𞤴\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +T; 舛.\u067D𞤒\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +N; 舛.\u067D𞤒\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +B; xn--8c1a.xn--2ib8jn539l; 舛.\u067D𞤴\u06BB; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +B; 舛.\u067D𞤴\u06BB; ; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +B; 舛.\u067D𞤒\u06BB; 舛.\u067D𞤴\u06BB; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +B; xn--8c1a.xn--2ib8jv19e6413b; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +T; ⾇.\u067D𞤒\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +N; ⾇.\u067D𞤒\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +B; 4򭆥。\u0767≯; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 4.ݧ≯ +B; 4򭆥。\u0767>\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 4.ݧ≯ +B; xn--4-xn17i.xn--rpb459k; [B1 B3 V6]; [B1 B3 V6] # 4.ݧ≯ +B; 𲔏𞫨񺿂硲.\u06AD; [B5 P1 V6]; [B5 P1 V6] # 硲.ڭ +B; 𲔏𞫨񺿂硲.\u06AD; [B5 P1 V6]; [B5 P1 V6] # 硲.ڭ +B; xn--lcz1610fn78gk609a.xn--gkb; [B5 V6]; [B5 V6] # 硲.ڭ +T; \u200C.\uFE08\u0666Ⴆ℮; [B1 C1 P1 V6]; [B1 P1 V6 A4_2] # .٦Ⴆ℮ +N; \u200C.\uFE08\u0666Ⴆ℮; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .٦Ⴆ℮ +T; \u200C.\uFE08\u0666ⴆ℮; [B1 C1]; [B1 A4_2] # .٦ⴆ℮ +N; \u200C.\uFE08\u0666ⴆ℮; [B1 C1]; [B1 C1] # .٦ⴆ℮ +B; .xn--fib628k4li; [B1 A4_2]; [B1 A4_2] # .٦ⴆ℮ +B; xn--0ug.xn--fib628k4li; [B1 C1]; [B1 C1] # .٦ⴆ℮ +B; .xn--fib263c0yn; [B1 V6 A4_2]; [B1 V6 A4_2] # .٦Ⴆ℮ +B; xn--0ug.xn--fib263c0yn; [B1 C1 V6]; [B1 C1 V6] # .٦Ⴆ℮ +T; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ +N; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ +T; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ +N; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ +T; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ +N; 
\u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ +B; xn--5jb.xn--xya149b; [B1 V5]; [B1 V5] # ڣ.്ϟ +B; xn--5jb.xn--xya149bpvp; [B1 V5]; [B1 V5] # ڣ.്ϟ +T; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ +N; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ +T; \u200C𞸇𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 +N; \u200C𞸇𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +T; \u200C𞸇𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 +N; \u200C𞸇𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +T; \u200C\u062D𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 +N; \u200C\u062D𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +T; \u200C\u062D𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 +N; \u200C\u062D𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +B; xn--sgb4140l.xn----qmc5075grs9e; [B2 B3]; [B2 B3] # ح𑘿.أ𐮂-腍 +B; xn--sgb953kmi8o.xn----qmc5075grs9e; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +B; -򭷙\u066B纛。𝟛񭤇🄅; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.3🄅 +B; -򭷙\u066B纛。3񭤇4,; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.34, +B; xn----vqc8143g0tt4i.xn--34,-8787l; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.34, +B; xn----vqc8143g0tt4i.xn--3-os1sn476y; [B1 V3 V6]; [B1 V3 V6] # -٫纛.3🄅 +B; 🔔.Ⴂ\u07CC\u0BCD𐋮; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 🔔.Ⴂߌ்𐋮 +B; 🔔.Ⴂ\u07CC\u0BCD𐋮; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 🔔.Ⴂߌ்𐋮 +B; 🔔.ⴂ\u07CC\u0BCD𐋮; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 +B; xn--nv8h.xn--nsb46rvz1b222p; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 +B; xn--nv8h.xn--nsb46r83e8112a; [B1 B5 V6]; [B1 B5 V6] # 🔔.Ⴂߌ்𐋮 +B; 🔔.ⴂ\u07CC\u0BCD𐋮; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 +B; 軥\u06B3.-𖬵; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 軥ڳ.-𖬵 +B; xn--mkb5480e.xn----6u5m; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 軥ڳ.-𖬵 +B; 𐹤\u07CA\u06B6.𐨂-; [B1 V3 V5]; [B1 V3 V5] # 𐹤ߊڶ.𐨂- +B; xn--pkb56cn614d.xn----974i; [B1 V3 V5]; [B1 V3 V5] # 𐹤ߊڶ.𐨂- +B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; -0.xn--r4e872ah77nghm; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; ꡰ︒--。\u17CC靈𐹢񘳮; [B1 B6 P1 V2 V3 V5 V6]; [B1 B6 P1 V2 V3 V5 V6] # ꡰ︒--.៌靈𐹢 +B; ꡰ。--。\u17CC靈𐹢񘳮; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ꡰ.--.៌靈𐹢 +B; xn--md9a.--.xn--o4e6836dpxudz0v1c; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ꡰ.--.៌靈𐹢 +B; xn-----bk9hu24z.xn--o4e6836dpxudz0v1c; [B1 B6 V2 V3 V5 V6]; [B1 B6 V2 V3 V5 V6] # ꡰ︒--.៌靈𐹢 +B; \u115FႿႵრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ႿႵრ.୍ +B; \u115FႿႵრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ႿႵრ.୍ +B; \u115Fⴟⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ⴟⴕრ.୍ +B; \u115FႿⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # Ⴟⴕრ.୍ +B; xn--3nd0etsm92g.xn--9ic; [V5 V6]; [V5 V6] # Ⴟⴕრ.୍ +B; xn--1od7wz74eeb.xn--9ic; [V5 V6]; [V5 V6] # ⴟⴕრ.୍ +B; xn--tndt4hvw.xn--9ic; [V5 V6]; [V5 V6] # ႿႵრ.୍ +B; \u115Fⴟⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ⴟⴕრ.୍ +B; \u115FႿⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # Ⴟⴕრ.୍ +B; 🄃𐹠.\u0664󠅇; [B1 P1 V6]; [B1 P1 V6] # 🄃𐹠.٤ +B; 2,𐹠.\u0664󠅇; [B1 P1 V6]; [B1 P1 V6] # 2,𐹠.٤ +B; xn--2,-5g3o.xn--dib; [B1 P1 V6]; [B1 P1 V6] # 2,𐹠.٤ +B; xn--7n0d1189a.xn--dib; [B1 V6]; [B1 V6] # 🄃𐹠.٤ +T; 򻲼\u200C\uFC5B.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # ذٰ.ߒࡈ᯳ +N; 򻲼\u200C\uFC5B.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # ذٰ.ߒࡈ᯳ +T; 򻲼\u200C\u0630\u0670.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # ذٰ.ߒࡈ᯳ +N; 򻲼\u200C\u0630\u0670.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # ذٰ.ߒࡈ᯳ +B; xn--vgb2kp1223g.xn--tsb0vz43c; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ذٰ.ߒࡈ᯳ +B; xn--vgb2kq00fl213y.xn--tsb0vz43c; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # 
ذٰ.ߒࡈ᯳ +T; \u200D\u200D𞵪\u200C。ᡘ𑲭\u17B5; [B1 C1 C2 P1 V6]; [P1 V6] # .ᡘ𑲭 +N; \u200D\u200D𞵪\u200C。ᡘ𑲭\u17B5; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # .ᡘ𑲭 +B; xn--l96h.xn--03e93aq365d; [V6]; [V6] # .ᡘ𑲭 +B; xn--0ugba05538b.xn--03e93aq365d; [B1 C1 C2 V6]; [B1 C1 C2 V6] # .ᡘ𑲭 +B; 𞷻。⚄񗑇𑁿; [B1 P1 V6]; [B1 P1 V6] +B; xn--qe7h.xn--c7h2966f7so4a; [B1 V6]; [B1 V6] +B; \uA8C4≠.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ +B; \uA8C4=\u0338.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ +B; \uA8C4≠.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ +B; \uA8C4=\u0338.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ +B; xn--1chy504c.xn--gib1777v; [B1 V5 V6]; [B1 V5 V6] # ꣄≠.𞠨٧ +B; 𝟛𝆪\uA8C4。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- +B; 𝟛\uA8C4𝆪。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- +B; 3\uA8C4𝆪。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- +B; xn--3-sl4eu679e.xn----xn4e; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- +B; \u075F\u1BA2\u103AႧ.4; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ݟᮢ်Ⴇ.4 +B; \u075F\u1BA2\u103Aⴇ.4; [B1 B2 B3]; [B1 B2 B3] # ݟᮢ်ⴇ.4 +B; xn--jpb846bjzj7pr.4; [B1 B2 B3]; [B1 B2 B3] # ݟᮢ်ⴇ.4 +B; xn--jpb846bmjw88a.4; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ݟᮢ်Ⴇ.4 +B; ᄹ。\u0ECA򠯤󠄞; [P1 V5 V6]; [P1 V5 V6] # ᄹ.໊ +B; ᄹ。\u0ECA򠯤󠄞; [P1 V5 V6]; [P1 V5 V6] # ᄹ.໊ +B; xn--lrd.xn--s8c05302k; [V5 V6]; [V5 V6] # ᄹ.໊ +B; Ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] +B; Ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] +B; ⴆ򻢩.󠆡\uFE09𞤯; [P1 V6]; [P1 V6] +B; xn--xkjw3965g.xn--ne6h; [V6]; [V6] +B; xn--end82983m.xn--ne6h; [V6]; [V6] +B; ⴆ򻢩.󠆡\uFE09𞤯; [P1 V6]; [P1 V6] +B; ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] +B; ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] +T; ß\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ßࠋ︒ٻ.帼f∫∫ +N; ß\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ßࠋ︒ٻ.帼f∫∫ +T; ß\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ßࠋ.ٻ.帼f∫∫ +N; ß\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ +T; ß\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6] # ßࠋ.ٻ.帼f∫∫ +N; ß\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ +T; SS\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ +N; SS\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ +T; ss\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ +N; ss\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ +T; Ss\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ +N; Ss\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ +B; xn--ss-uze.xn--0ib.xn--f-tcoa9162d; [B5 B6]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ +B; xn--ss-uze.xn--0ib.xn--f-sgn48ga6997e; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ +B; xn--zca687a.xn--0ib.xn--f-sgn48ga6997e; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ +T; ß\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ßࠋ︒ٻ.帼f∫∫ +N; ß\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ßࠋ︒ٻ.帼f∫∫ +T; SS\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +N; SS\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +T; ss\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +N; ss\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +T; Ss\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +N; Ss\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +B; xn--ss-k0d31nu121d.xn--f-tcoa9162d; [B5 B6 V6]; [B5 B6 V6] # ssࠋ︒ٻ.帼f∫∫ +B; xn--ss-k0d31nu121d.xn--f-sgn48ga6997e; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ssࠋ︒ٻ.帼f∫∫ +B; xn--zca68zj8ac956c.xn--f-sgn48ga6997e; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ßࠋ︒ٻ.帼f∫∫ +T; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # .𐹴 +N; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .𐹴 +T; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; 
[B1 P1 V6] # .𐹴 +N; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .𐹴 +B; xn--8l83e.xn--so0dw168a; [B1 V6]; [B1 V6] +B; xn--8l83e.xn--1ug4105gsxwf; [B1 C2 V6]; [B1 C2 V6] # .𐹴 +B; 񗛨.򅟢𝟨\uA8C4; [P1 V6]; [P1 V6] # .6꣄ +B; 񗛨.򅟢6\uA8C4; [P1 V6]; [P1 V6] # .6꣄ +B; xn--mi60a.xn--6-sl4es8023c; [V6]; [V6] # .6꣄ +B; \u1AB2\uFD8E。-۹ႱႨ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹ႱႨ +B; \u1AB2\u0645\u062E\u062C。-۹ႱႨ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹ႱႨ +B; \u1AB2\u0645\u062E\u062C。-۹ⴑⴈ; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ +B; \u1AB2\u0645\u062E\u062C。-۹Ⴑⴈ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ +B; xn--rgbd2e831i.xn----zyc875efr3a; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ +B; xn--rgbd2e831i.xn----zyc3430a9a; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ +B; xn--rgbd2e831i.xn----zyc155e9a; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᪲مخج.-۹ႱႨ +B; \u1AB2\uFD8E。-۹ⴑⴈ; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ +B; \u1AB2\uFD8E。-۹Ⴑⴈ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ +B; 𞤤.-\u08A3︒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤤.-ࢣ︒ +B; 𞤤.-\u08A3。; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. +B; 𞤂.-\u08A3。; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. +B; xn--ce6h.xn----cod.; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. +B; 𞤂.-\u08A3︒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤤.-ࢣ︒ +B; xn--ce6h.xn----cod7069p; [B1 V3 V6]; [B1 V3 V6] # 𞤤.-ࢣ︒ +T; \u200C𐺨.\u0859--; [B1 C1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # .࡙-- +N; \u200C𐺨.\u0859--; [B1 C1 P1 V3 V5 V6]; [B1 C1 P1 V3 V5 V6] # .࡙-- +B; xn--9p0d.xn-----h6e; [B1 V3 V5 V6]; [B1 V3 V5 V6] # .࡙-- +B; xn--0ug7905g.xn-----h6e; [B1 C1 V3 V5 V6]; [B1 C1 V3 V5 V6] # .࡙-- +B; 𐋸󮘋Ⴢ.Ⴁ; [P1 V6]; [P1 V6] +B; 𐋸󮘋ⴢ.ⴁ; [P1 V6]; [P1 V6] +B; 𐋸󮘋Ⴢ.ⴁ; [P1 V6]; [P1 V6] +B; xn--6nd5215jr2u0h.xn--skj; [V6]; [V6] +B; xn--qlj1559dr224h.xn--skj; [V6]; [V6] +B; xn--6nd5215jr2u0h.xn--8md; [V6]; [V6] +T; 񗑿\uA806₄򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς +N; 񗑿\uA806₄򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς +T; 񗑿\uA8064򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς +N; 񗑿\uA8064򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς +B; 񗑿\uA8064򩞆。𲩧󠒹Σ; [P1 V6]; [P1 V6] # ꠆4.σ +B; 񗑿\uA8064򩞆。𲩧󠒹σ; [P1 V6]; [P1 V6] # ꠆4.σ +B; xn--4-w93ej7463a9io5a.xn--4xa31142bk3f0d; [V6]; [V6] # ꠆4.σ +B; xn--4-w93ej7463a9io5a.xn--3xa51142bk3f0d; [V6]; [V6] # ꠆4.ς +B; 񗑿\uA806₄򩞆。𲩧󠒹Σ; [P1 V6]; [P1 V6] # ꠆4.σ +B; 񗑿\uA806₄򩞆。𲩧󠒹σ; [P1 V6]; [P1 V6] # ꠆4.σ +B; 󠆀\u0723。\u1DF4\u0775; [B1 V5]; [B1 V5] # ܣ.ᷴݵ +B; xn--tnb.xn--5pb136i; [B1 V5]; [B1 V5] # ܣ.ᷴݵ +T; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +N; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +T; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +N; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +T; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ +N; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ +B; xn--0vb1535kdb6e.xn--8kjz186s; [B1]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ +B; xn--0vb1535kdb6e.xn--1ug742c5714c; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ +B; xn--0vb1535kdb6e.xn--pnd93707a; [B1 V6]; [B1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +B; xn--0vb1535kdb6e.xn--pnd879eqy33c; [B1 B6 C2 V6]; [B1 B6 C2 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +T; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ +N; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ +T; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᜔𐭪.-𐹴 +N; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # ᜔𐭪.-𐹴 +T; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᜔𐭪.-𐹴 +N; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # ᜔𐭪.-𐹴 +B; xn--fze4126jujt0g.xn----c36i; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᜔𐭪.-𐹴 +B; xn--fze807bso0spy14i.xn----c36i; [B1 C2 V3 V5 V6]; [B1 C2 V3 V5 V6] # ᜔𐭪.-𐹴 +B; 𾢬。\u0729︒쯙𝟧; [B2 P1 V6]; [B2 P1 V6] # .ܩ︒쯙5 +B; 𾢬。\u0729︒쯙𝟧; [B2 P1 V6]; [B2 P1 
V6] # .ܩ︒쯙5 +B; 𾢬。\u0729。쯙5; [P1 V6]; [P1 V6] # .ܩ.쯙5 +B; 𾢬。\u0729。쯙5; [P1 V6]; [P1 V6] # .ܩ.쯙5 +B; xn--t92s.xn--znb.xn--5-y88f; [V6]; [V6] # .ܩ.쯙5 +B; xn--t92s.xn--5-p1c0712mm8rb; [B2 V6]; [B2 V6] # .ܩ︒쯙5 +B; 𞤟-。\u0762≮뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 +B; 𞤟-。\u0762<\u0338뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 +B; 𞥁-。\u0762<\u0338뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 +B; 𞥁-。\u0762≮뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 +B; xn----1j8r.xn--mpb269krv4i; [B2 B3 V3 V6]; [B2 B3 V3 V6] # 𞥁-.ݢ≮뻐 +B; 𞥩-򊫠.\u08B4≠; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ +B; 𞥩-򊫠.\u08B4=\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ +B; 𞥩-򊫠.\u08B4≠; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ +B; 𞥩-򊫠.\u08B4=\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ +B; xn----cm8rp3609a.xn--9yb852k; [B2 B3 V6]; [B2 B3 V6] # -.ࢴ≠ +T; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ +N; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ +T; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ +N; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ +T; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ +N; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ +B; -񅂏ΣႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σႼ.١ +B; -񅂏σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ +B; -񅂏Σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ +B; xn----0mb9682aov12f.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -σⴜ.١ +B; xn----0mb770hun11i.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -σႼ.١ +B; xn----ymb2782aov12f.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -ςⴜ.١ +B; xn----ymb080hun11i.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -ςႼ.١ +T; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ +N; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ +B; -񅂏ΣႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σႼ.١ +B; -񅂏σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ +B; -񅂏Σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ +T; \u17CA.\u200D𝟮𑀿; [C2 V5]; [V5] # ៊.2𑀿 +N; \u17CA.\u200D𝟮𑀿; [C2 V5]; [C2 V5] # ៊.2𑀿 +T; \u17CA.\u200D2𑀿; [C2 V5]; [V5] # ៊.2𑀿 +N; \u17CA.\u200D2𑀿; [C2 V5]; [C2 V5] # ៊.2𑀿 +B; xn--m4e.xn--2-ku7i; [V5]; [V5] # ៊.2𑀿 +B; xn--m4e.xn--2-tgnv469h; [C2 V5]; [C2 V5] # ៊.2𑀿 +B; ≯𝟖。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 +B; >\u0338𝟖。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 +B; ≯8。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 +B; >\u03388。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 +B; xn--8-ogo.xn--jof5303iv1z5d; [B1 V5 V6]; [B1 V5 V6] # ≯8.᩠𐫓 +T; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑲫Ↄ٤. +N; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑲫Ↄ٤. +T; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑲫Ↄ٤. +N; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑲫Ↄ٤. +T; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 V5] # 𑲫ↄ٤. +N; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. +B; xn--dib100l8x1p.; [B1 V5]; [B1 V5] # 𑲫ↄ٤. +B; xn--dib100l8x1p.xn--0ug; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. +B; xn--dib999kcy1p.; [B1 V5 V6]; [B1 V5 V6] # 𑲫Ↄ٤. +B; xn--dib999kcy1p.xn--0ug; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𑲫Ↄ٤. +T; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 V5] # 𑲫ↄ٤. +N; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. +T; \u0C00𝟵\u200D\uFC9D.\u200D\u0750⒈; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ఀ9بح.ݐ⒈ +N; \u0C00𝟵\u200D\uFC9D.\u200D\u0750⒈; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ఀ9بح.ݐ⒈ +T; \u0C009\u200D\u0628\u062D.\u200D\u07501.; [B1 C2 V5]; [B1 V5] # ఀ9بح.ݐ1. +N; \u0C009\u200D\u0628\u062D.\u200D\u07501.; [B1 C2 V5]; [B1 C2 V5] # ఀ9بح.ݐ1. +B; xn--9-1mcp570d.xn--1-x3c.; [B1 V5]; [B1 V5] # ఀ9بح.ݐ1. +B; xn--9-1mcp570dl51a.xn--1-x3c211q.; [B1 C2 V5]; [B1 C2 V5] # ఀ9بح.ݐ1. 
+B; xn--9-1mcp570d.xn--3ob470m; [B1 V5 V6]; [B1 V5 V6] # ఀ9بح.ݐ⒈ +B; xn--9-1mcp570dl51a.xn--3ob977jmfd; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ఀ9بح.ݐ⒈ +T; \uAAF6。嬶ß葽; [V5]; [V5] # ꫶.嬶ß葽 +N; \uAAF6。嬶ß葽; [V5]; [V5] # ꫶.嬶ß葽 +B; \uAAF6。嬶SS葽; [V5]; [V5] # ꫶.嬶ss葽 +B; \uAAF6。嬶ss葽; [V5]; [V5] # ꫶.嬶ss葽 +B; \uAAF6。嬶Ss葽; [V5]; [V5] # ꫶.嬶ss葽 +B; xn--2v9a.xn--ss-q40dp97m; [V5]; [V5] # ꫶.嬶ss葽 +B; xn--2v9a.xn--zca7637b14za; [V5]; [V5] # ꫶.嬶ß葽 +B; 𑚶⒈。񞻡𐹺; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] +B; 𑚶1.。񞻡𐹺; [B5 B6 P1 V5 V6 A4_2]; [B5 B6 P1 V5 V6 A4_2] +B; xn--1-3j0j..xn--yo0d5914s; [B5 B6 V5 V6 A4_2]; [B5 B6 V5 V6 A4_2] +B; xn--tshz969f.xn--yo0d5914s; [B5 B6 V5 V6]; [B5 B6 V5 V6] +B; 𑜤︒≮.񚕽\u05D8𞾩; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𑜤︒≮.ט +B; 𑜤︒<\u0338.񚕽\u05D8𞾩; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𑜤︒≮.ט +B; 𑜤。≮.񚕽\u05D8𞾩; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑜤.≮.ט +B; 𑜤。<\u0338.񚕽\u05D8𞾩; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑜤.≮.ט +B; xn--ci2d.xn--gdh.xn--deb0091w5q9u; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𑜤.≮.ט +B; xn--gdh5267fdzpa.xn--deb0091w5q9u; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 𑜤︒≮.ט +T; 󠆋\u0603񏦤.⇁ς򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁ς +N; 󠆋\u0603񏦤.⇁ς򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁ς +B; 󠆋\u0603񏦤.⇁Σ򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁σ +B; 󠆋\u0603񏦤.⇁σ򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁σ +B; xn--lfb04106d.xn--4xa964mxv16m8moq; [B1 V6]; [B1 V6] # .⇁σ +B; xn--lfb04106d.xn--3xa174mxv16m8moq; [B1 V6]; [B1 V6] # .⇁ς +T; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # ς𑐽𑜫.𐫄 +N; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # ς𑐽𑜫.𐫄 +T; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # ς𑐽𑜫.𐫄 +N; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # ς𑐽𑜫.𐫄 +T; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 +N; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 +T; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 +N; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 +B; xn--4xa2260lk3b8z15g.xn--tw9ct349a; [V6]; [V6] +B; xn--4xa2260lk3b8z15g.xn--0ug4653g2xzf; [C1 V6]; [C1 V6] # σ𑐽𑜫.𐫄 +B; xn--3xa4260lk3b8z15g.xn--0ug4653g2xzf; [C1 V6]; [C1 V6] # ς𑐽𑜫.𐫄 +T; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 +N; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 +T; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 +N; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 +B; -򵏽。-\uFC4C\u075B; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.-نحݛ +B; -򵏽。-\u0646\u062D\u075B; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.-نحݛ +B; xn----o452j.xn----cnc8e38c; [B1 V3 V6]; [B1 V3 V6] # -.-نحݛ +T; ⺢򇺅𝟤。\u200D🚷; [C2 P1 V6]; [P1 V6] # ⺢2.🚷 +N; ⺢򇺅𝟤。\u200D🚷; [C2 P1 V6]; [C2 P1 V6] # ⺢2.🚷 +T; ⺢򇺅2。\u200D🚷; [C2 P1 V6]; [P1 V6] # ⺢2.🚷 +N; ⺢򇺅2。\u200D🚷; [C2 P1 V6]; [C2 P1 V6] # ⺢2.🚷 +B; xn--2-4jtr4282f.xn--m78h; [V6]; [V6] +B; xn--2-4jtr4282f.xn--1ugz946p; [C2 V6]; [C2 V6] # ⺢2.🚷 +T; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # ⷾ𐹲. +N; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # ⷾ𐹲. +T; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # ⷾ𐹲. +N; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # ⷾ𐹲. +B; xn--hvc220of37m.xn--3e36c; [B5 B6 V6]; [B5 B6 V6] # ⷾ𐹲. +B; xn--hvc488g69j402t.xn--3e36c; [B5 B6 C2 V6]; [B5 B6 C2 V6] # ⷾ𐹲. 
+B; 𐹢.Ⴍ₉⁸; [B1 P1 V6]; [B1 P1 V6] +B; 𐹢.Ⴍ98; [B1 P1 V6]; [B1 P1 V6] +B; 𐹢.ⴍ98; [B1]; [B1] +B; xn--9n0d.xn--98-u61a; [B1]; [B1] +B; xn--9n0d.xn--98-7ek; [B1 V6]; [B1 V6] +B; 𐹢.ⴍ₉⁸; [B1]; [B1] +T; \u200C\u034F。ß\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ß⒚≯ +N; \u200C\u034F。ß\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ß⒚≯ +T; \u200C\u034F。ß\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ß⒚≯ +N; \u200C\u034F。ß\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ß⒚≯ +T; \u200C\u034F。ß\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ß19.≯ +N; \u200C\u034F。ß\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ß19.≯ +T; \u200C\u034F。ß\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ß19.≯ +N; \u200C\u034F。ß\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ß19.≯ +T; \u200C\u034F。SS\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。SS\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。SS\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。SS\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。Ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。Ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。Ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。Ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +B; .xn--ss19-w0i.xn--hdh; [B1 B5 V6 A4_2]; [B1 B5 V6 A4_2] # .ss19.≯ +B; xn--0ug.xn--ss19-w0i.xn--hdh; [B1 B5 C1 V6]; [B1 B5 C1 V6] # .ss19.≯ +B; xn--0ug.xn--19-fia813f.xn--hdh; [B1 B5 C1 V6]; [B1 B5 C1 V6] # .ß19.≯ +T; \u200C\u034F。SS\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。SS\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。SS\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。SS\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。Ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。Ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。Ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。Ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +B; .xn--ss-9if872xjjc; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .ss⒚≯ +B; xn--0ug.xn--ss-9if872xjjc; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ss⒚≯ +B; xn--0ug.xn--zca612bx9vo5b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ß⒚≯ +T; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ᡌ.𣃔 +N; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ᡌ.𣃔 +T; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ᡌ.𣃔 +N; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ᡌ.𣃔 +B; xn--c8e5919u.xn--od1j; [B2 B3 V6]; [B2 B3 V6] +B; xn--c8e180bqz13b.xn--od1j; [B1 C1 V6]; [B1 C1 V6] # ᡌ.𣃔 +B; \u07D0򜬝-񡢬。\u0FA0Ⴛ𞷏𝆬; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 
ߐ-.ྠႻ𝆬 +B; \u07D0򜬝-񡢬。\u0FA0ⴛ𞷏𝆬; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߐ-.ྠⴛ𝆬 +B; xn----8bd11730jefvw.xn--wfd802mpm20agsxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߐ-.ྠⴛ𝆬 +B; xn----8bd11730jefvw.xn--wfd08cd265hgsxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߐ-.ྠႻ𝆬 +B; 𝨥。⫟𑈾; [V5]; [V5] +B; xn--n82h.xn--63iw010f; [V5]; [V5] +T; ⾛\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.Ⴕ𞠬 +N; ⾛\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.Ⴕ𞠬 +T; 走\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.Ⴕ𞠬 +N; 走\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.Ⴕ𞠬 +T; 走\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.ⴕ𞠬 +N; 走\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.ⴕ𞠬 +B; xn--6ob9779d.xn--mfb511rxu80a; [B5 B6 V6]; [B5 B6 V6] # 走ݓ.ⴕ𞠬 +B; xn--6ob9779d.xn--mfb444k5gjt754b; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 走ݓ.ⴕ𞠬 +B; xn--6ob9779d.xn--mfb785ck569a; [B5 B6 V6]; [B5 B6 V6] # 走ݓ.Ⴕ𞠬 +B; xn--6ob9779d.xn--mfb785czmm0y85b; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 走ݓ.Ⴕ𞠬 +T; ⾛\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.ⴕ𞠬 +N; ⾛\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.ⴕ𞠬 +T; -ᢗ\u200C🄄.𑜢; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᢗ🄄.𑜢 +N; -ᢗ\u200C🄄.𑜢; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ🄄.𑜢 +T; -ᢗ\u200C3,.𑜢; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᢗ3,.𑜢 +N; -ᢗ\u200C3,.𑜢; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ3,.𑜢 +B; xn---3,-3eu.xn--9h2d; [P1 V3 V5 V6]; [P1 V3 V5 V6] +B; xn---3,-3eu051c.xn--9h2d; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ3,.𑜢 +B; xn----pck1820x.xn--9h2d; [V3 V5 V6]; [V3 V5 V6] +B; xn----pck312bx563c.xn--9h2d; [C1 V3 V5 V6]; [C1 V3 V5 V6] # -ᢗ🄄.𑜢 +T; ≠𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.Ⴚ +N; ≠𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.Ⴚ +T; =\u0338𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.Ⴚ +N; =\u0338𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.Ⴚ +T; =\u0338𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.ⴚ +N; =\u0338𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.ⴚ +T; ≠𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.ⴚ +N; ≠𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.ⴚ +B; xn--1ch2293gv3nr.xn--ilj23531g; [B1 V6]; [B1 V6] +B; xn--0ug83gn618a21ov.xn--ilj23531g; [B1 C1 V6]; [B1 C1 V6] # ≠.ⴚ +B; xn--1ch2293gv3nr.xn--ynd49496l; [B1 V6]; [B1 V6] +B; xn--0ug83gn618a21ov.xn--ynd49496l; [B1 C1 V6]; [B1 C1 V6] # ≠.Ⴚ +B; \u0669。󠇀𑇊; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٩.𑇊 +B; \u0669。󠇀𑇊; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٩.𑇊 +B; xn--iib.xn--6d1d; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٩.𑇊 +B; \u1086𞶀≯⒍。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ႆ≯⒍.- +B; \u1086𞶀>\u0338⒍。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ႆ≯⒍.- +B; \u1086𞶀≯6.。-; [B1 P1 V3 V5 V6 A4_2]; [B1 P1 V3 V5 V6 A4_2] # ႆ≯6..- +B; \u1086𞶀>\u03386.。-; [B1 P1 V3 V5 V6 A4_2]; [B1 P1 V3 V5 V6 A4_2] # ႆ≯6..- +B; xn--6-oyg968k7h74b..-; [B1 V3 V5 V6 A4_2]; [B1 V3 V5 V6 A4_2] # ႆ≯6..- +B; xn--hmd482gqqb8730g.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ႆ≯⒍.- +B; \u17B4.쮇-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # .쮇- +B; \u17B4.쮇-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # .쮇- +B; xn--z3e.xn----938f; [V3 V5 V6]; [V3 V5 V6] # .쮇- +T; \u200C𑓂。⒈-􀪛; [C1 P1 V6]; [P1 V5 V6] # 𑓂.⒈- +N; \u200C𑓂。⒈-􀪛; [C1 P1 V6]; [C1 P1 V6] # 𑓂.⒈- +T; \u200C𑓂。1.-􀪛; [C1 P1 V3 V6]; [P1 V3 V5 V6] # 𑓂.1.- +N; \u200C𑓂。1.-􀪛; [C1 P1 V3 V6]; [C1 P1 V3 V6] # 𑓂.1.- +B; xn--wz1d.1.xn----rg03o; [V3 V5 V6]; [V3 V5 V6] +B; xn--0ugy057g.1.xn----rg03o; [C1 V3 V6]; [C1 V3 V6] # 𑓂.1.- +B; xn--wz1d.xn----dcp29674o; [V5 V6]; [V5 V6] +B; xn--0ugy057g.xn----dcp29674o; [C1 V6]; [C1 V6] # 𑓂.⒈- +T; ⒈\uFEAE\u200C。\u20E9🖞\u200C𖬴; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈ر.⃩🖞𖬴 +N; 
⒈\uFEAE\u200C。\u20E9🖞\u200C𖬴; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ⒈ر.⃩🖞𖬴 +T; 1.\u0631\u200C。\u20E9🖞\u200C𖬴; [B1 B3 C1 V5]; [B1 V5] # 1.ر.⃩🖞𖬴 +N; 1.\u0631\u200C。\u20E9🖞\u200C𖬴; [B1 B3 C1 V5]; [B1 B3 C1 V5] # 1.ر.⃩🖞𖬴 +B; 1.xn--wgb.xn--c1g6021kg18c; [B1 V5]; [B1 V5] # 1.ر.⃩🖞𖬴 +B; 1.xn--wgb253k.xn--0ugz6a8040fty5d; [B1 B3 C1 V5]; [B1 B3 C1 V5] # 1.ر.⃩🖞𖬴 +B; xn--wgb746m.xn--c1g6021kg18c; [B1 V5 V6]; [B1 V5 V6] # ⒈ر.⃩🖞𖬴 +B; xn--wgb253kmfd.xn--0ugz6a8040fty5d; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ⒈ر.⃩🖞𖬴 +B; 󌭇。𝟐\u1BA8\u07D4; [B1 P1 V6]; [B1 P1 V6] # .2ᮨߔ +B; 󌭇。2\u1BA8\u07D4; [B1 P1 V6]; [B1 P1 V6] # .2ᮨߔ +B; xn--xm89d.xn--2-icd143m; [B1 V6]; [B1 V6] # .2ᮨߔ +T; \uFD8F򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.ς𐹷 +N; \uFD8F򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.ς𐹷 +T; \u0645\u062E\u0645򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.ς𐹷 +N; \u0645\u062E\u0645򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.ς𐹷 +T; \u0645\u062E\u0645򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 +N; \u0645\u062E\u0645򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 +T; \u0645\u062E\u0645򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 +N; \u0645\u062E\u0645򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 +B; xn--tgb9bb64691z.xn--4xa6667k; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # مخم.σ𐹷 +B; xn--tgb9bb64691z.xn--4xa895lrp7n; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # مخم.σ𐹷 +B; xn--tgb9bb64691z.xn--3xa006lrp7n; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # مخم.ς𐹷 +T; \uFD8F򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 +N; \uFD8F򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 +T; \uFD8F򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 +N; \uFD8F򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 +B; ⒎\u06C1\u0605。\uAAF6۵𐇽; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒎ہ.꫶۵𐇽 +B; 7.\u06C1\u0605。\uAAF6۵𐇽; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 7.ہ.꫶۵𐇽 +B; 7.xn--nfb98a.xn--imb3805fxt8b; [B1 V5 V6]; [B1 V5 V6] # 7.ہ.꫶۵𐇽 +B; xn--nfb98ai25e.xn--imb3805fxt8b; [B1 V5 V6]; [B1 V5 V6] # ⒎ہ.꫶۵𐇽 +B; -ᡥ᠆󍲭。\u0605\u1A5D𐹡; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ᡥ᠆.ᩝ𐹡 +B; xn----f3j6s87156i.xn--nfb035hoo2p; [B1 V3 V6]; [B1 V3 V6] # -ᡥ᠆.ᩝ𐹡 +T; \u200D.\u06BD\u0663\u0596; [B1 C2]; [A4_2] # .ڽ٣֖ +N; \u200D.\u06BD\u0663\u0596; [B1 C2]; [B1 C2] # .ڽ٣֖ +B; .xn--hcb32bni; [A4_2]; [A4_2] # .ڽ٣֖ +B; xn--1ug.xn--hcb32bni; [B1 C2]; [B1 C2] # .ڽ٣֖ +B; xn--hcb32bni; \u06BD\u0663\u0596; xn--hcb32bni # ڽ٣֖ +B; \u06BD\u0663\u0596; ; xn--hcb32bni # ڽ٣֖ +T; 㒧۱.Ⴚ\u0678\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 㒧۱.Ⴚيٴ +N; 㒧۱.Ⴚ\u0678\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 㒧۱.Ⴚيٴ +T; 㒧۱.Ⴚ\u064A\u0674\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 㒧۱.Ⴚيٴ +N; 㒧۱.Ⴚ\u064A\u0674\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 㒧۱.Ⴚيٴ +T; 㒧۱.ⴚ\u064A\u0674\u200D; [B5 B6 C2]; [B5 B6] # 㒧۱.ⴚيٴ +N; 㒧۱.ⴚ\u064A\u0674\u200D; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ +B; xn--emb715u.xn--mhb8fy26k; [B5 B6]; [B5 B6] # 㒧۱.ⴚيٴ +B; xn--emb715u.xn--mhb8f960g03l; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ +B; xn--emb715u.xn--mhb8f817a; [B5 B6 V6]; [B5 B6 V6] # 㒧۱.Ⴚيٴ +B; xn--emb715u.xn--mhb8f817ao2p; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 㒧۱.Ⴚيٴ +T; 㒧۱.ⴚ\u0678\u200D; [B5 B6 C2]; [B5 B6] # 㒧۱.ⴚيٴ +N; 㒧۱.ⴚ\u0678\u200D; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ +B; \u0F94ꡋ-.-𖬴; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 +B; \u0F94ꡋ-.-𖬴; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 +B; xn----ukg9938i.xn----4u5m; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 +T; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- +N; 
񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- +T; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- +N; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- +T; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- +N; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- +T; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- +N; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- +B; xn----9mo67451g.xn----qj7b; [V3 V6]; [V3 V6] +B; xn----sgn90kn5663a.xn----qj7b; [C1 V3 V6]; [C1 V3 V6] # -⋢.标- +T; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ +N; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ +T; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ +N; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ +B; \u0671.Σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ +B; \u0671.σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ +B; xn--qib.xn--4xa21s; [B5 B6]; [B5 B6] # ٱ.σߜ +B; xn--qib.xn--3xa41s; [B5 B6]; [B5 B6] # ٱ.ςߜ +B; \u0671.Σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ +B; \u0671.σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ +T; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # .𑑂 +N; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # .𑑂 +T; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # .𑑂 +N; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # .𑑂 +B; xn--nfb17942h.xn--nzb6708kx3pn; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # .𑑂 +B; xn--nfb17942h.xn--nzb240jv06otevq; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # .𑑂 +B; 𐹾𐋩𞵜。\u1BF2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹾𐋩.᯲ +B; 𐹾𐋩𞵜。\u1BF2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹾𐋩.᯲ +B; xn--d97cn8rn44p.xn--0zf; [B1 V5 V6]; [B1 V5 V6] # 𐹾𐋩.᯲ +T; 6\u1160\u1C33󠸧.򟜊锰\u072Cς; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬς +N; 6\u1160\u1C33󠸧.򟜊锰\u072Cς; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬς +B; 6\u1160\u1C33󠸧.򟜊锰\u072CΣ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬσ +B; 6\u1160\u1C33󠸧.򟜊锰\u072Cσ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬσ +B; xn--6-5bh476ewr517a.xn--4xa95ohw6pk078g; [B1 B5 V6]; [B1 B5 V6] # 6ᰳ.锰ܬσ +B; xn--6-5bh476ewr517a.xn--3xa16ohw6pk078g; [B1 B5 V6]; [B1 B5 V6] # 6ᰳ.锰ܬς +B; \u06B3\uFE04񅎦𝟽。𐹽; [B1 B2 P1 V6]; [B1 B2 P1 V6] # ڳ7.𐹽 +B; \u06B3\uFE04񅎦7。𐹽; [B1 B2 P1 V6]; [B1 B2 P1 V6] # ڳ7.𐹽 +B; xn--7-yuc34665f.xn--1o0d; [B1 B2 V6]; [B1 B2 V6] # ڳ7.𐹽 +T; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 P1 V6] # .⫞ +N; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .⫞ +T; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 P1 V6] # .⫞ +N; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .⫞ +B; xn--pw6h.xn--53i; [B1 V6]; [B1 V6] +B; xn--pw6h.xn--0ug283b; [B1 C1 V6]; [B1 C1 V6] # .⫞ +B; -񕉴.\u06E0ᢚ-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.۠ᢚ- +B; xn----qi38c.xn----jxc827k; [V3 V5 V6]; [V3 V5 V6] # -.۠ᢚ- +T; ⌁\u200D𑄴.\u200C𝟩\u066C; [B1 C1 C2]; [B1] # ⌁𑄴.7٬ +N; ⌁\u200D𑄴.\u200C𝟩\u066C; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ +T; ⌁\u200D𑄴.\u200C7\u066C; [B1 C1 C2]; [B1] # ⌁𑄴.7٬ +N; ⌁\u200D𑄴.\u200C7\u066C; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ +B; xn--nhh5394g.xn--7-xqc; [B1]; [B1] # ⌁𑄴.7٬ +B; xn--1ug38i2093a.xn--7-xqc297q; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ +B; ︒\uFD05\u0E37\uFEFC。岓\u1BF2󠾃ᡂ; [B1 P1 V6]; [B1 P1 V6] # ︒صىืلا.岓᯲ᡂ +B; 。\u0635\u0649\u0E37\u0644\u0627。岓\u1BF2󠾃ᡂ; [P1 V6 A4_2]; [P1 V6 A4_2] # .صىืلا.岓᯲ᡂ +B; .xn--mgb1a7bt462h.xn--17e10qe61f9r71s; [V6 A4_2]; [V6 A4_2] # .صىืلا.岓᯲ᡂ +B; xn--mgb1a7bt462hf267a.xn--17e10qe61f9r71s; [B1 V6]; [B1 V6] # ︒صىืلا.岓᯲ᡂ +B; 𐹨。8𑁆; [B1]; [B1] +B; xn--go0d.xn--8-yu7i; [B1]; [B1] +B; 𞀕\u0D43.ꡚ\u08FA𐹰\u0D44; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ +B; 𞀕\u0D43.ꡚ\u08FA𐹰\u0D44; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ +B; xn--mxc5210v.xn--90b01t8u2p1ltd; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ +B; 󆩏𐦹\u0303。󠍅; [B1 B5 
B6 P1 V6]; [B1 B5 B6 P1 V6] # ̃. +B; 󆩏𐦹\u0303。󠍅; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ̃. +B; xn--nsa1265kp9z9e.xn--xt36e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ̃. +B; ᢌ.-\u085A; [V3]; [V3] # ᢌ.-࡚ +B; ᢌ.-\u085A; [V3]; [V3] # ᢌ.-࡚ +B; xn--59e.xn----5jd; [V3]; [V3] # ᢌ.-࡚ +B; 𥛛𑘶。𐹬𐲸\u0BCD; [B1 P1 V6]; [B1 P1 V6] # 𥛛𑘶.𐹬் +B; 𥛛𑘶。𐹬𐲸\u0BCD; [B1 P1 V6]; [B1 P1 V6] # 𥛛𑘶.𐹬் +B; xn--jb2dj685c.xn--xmc5562kmcb; [B1 V6]; [B1 V6] # 𥛛𑘶.𐹬் +T; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # Ⴐݿ. +N; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴐݿ. +T; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # Ⴐݿ. +N; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴐݿ. +T; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B5 B6] # ⴐݿ. +N; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. +B; xn--gqb743q.; [B5 B6]; [B5 B6] # ⴐݿ. +B; xn--gqb743q.xn--0ug; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. +B; xn--gqb918b.; [B5 B6 V6]; [B5 B6 V6] # Ⴐݿ. +B; xn--gqb918b.xn--0ug; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # Ⴐݿ. +T; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B5 B6] # ⴐݿ. +N; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. +T; 🄅𑲞-⒈。\u200Dᠩ\u06A5; [B1 C2 P1 V6]; [B1 B5 B6 P1 V6] # 🄅𑲞-⒈.ᠩڥ +N; 🄅𑲞-⒈。\u200Dᠩ\u06A5; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🄅𑲞-⒈.ᠩڥ +T; 4,𑲞-1.。\u200Dᠩ\u06A5; [B1 C2 P1 V6 A4_2]; [B1 B5 B6 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ +N; 4,𑲞-1.。\u200Dᠩ\u06A5; [B1 C2 P1 V6 A4_2]; [B1 C2 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ +B; xn--4,-1-w401a..xn--7jb180g; [B1 B5 B6 P1 V6 A4_2]; [B1 B5 B6 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ +B; xn--4,-1-w401a..xn--7jb180gexf; [B1 C2 P1 V6 A4_2]; [B1 C2 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ +B; xn----ecp8796hjtvg.xn--7jb180g; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 🄅𑲞-⒈.ᠩڥ +B; xn----ecp8796hjtvg.xn--7jb180gexf; [B1 C2 V6]; [B1 C2 V6] # 🄅𑲞-⒈.ᠩڥ +B; 񗀤。𞤪򮿋; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 񗀤。𞤈򮿋; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--4240a.xn--ie6h83808a; [B2 B3 V6]; [B2 B3 V6] +B; \u05C1۲。𐮊\u066C𝨊鄨; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 +B; \u05C1۲。𐮊\u066C𝨊鄨; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 +B; xn--pdb42d.xn--lib6412enztdwv6h; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 +B; 𞭳-ꡁ。\u1A69\u0BCD-; [B1 B2 B3 P1 V3 V5 V6]; [B1 B2 B3 P1 V3 V5 V6] # -ꡁ.ᩩ்- +B; xn----be4e4276f.xn----lze333i; [B1 B2 B3 V3 V5 V6]; [B1 B2 B3 V3 V5 V6] # -ꡁ.ᩩ்- +T; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß +N; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß +T; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß +N; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß +B; \u1039-𚮭🞢.SS; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; \u1039-𚮭🞢.ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; \u1039-𚮭🞢.Ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; xn----9tg11172akr8b.ss; [V5 V6]; [V5 V6] # ္-🞢.ss +B; xn----9tg11172akr8b.xn--zca; [V5 V6]; [V5 V6] # ္-🞢.ß +B; \u1039-𚮭🞢.SS; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; \u1039-𚮭🞢.ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; \u1039-𚮭🞢.Ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +T; \uFCF2-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 P1 V3 V6] # ـَّ-.Ⴟ␣ +N; \uFCF2-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 C1 P1 V6] # ـَّ-.Ⴟ␣ +T; \u0640\u064E\u0651-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 P1 V3 V6] # ـَّ-.Ⴟ␣ +N; \u0640\u064E\u0651-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 C1 P1 V6] # ـَّ-.Ⴟ␣ +T; \u0640\u064E\u0651-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 V3] # ـَّ-.ⴟ␣ +N; \u0640\u064E\u0651-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ +B; xn----eoc6bm.xn--xph904a; [B3 B6 V3]; [B3 B6 V3] # ـَّ-.ⴟ␣ +B; xn----eoc6bm0504a.xn--0ug13nd0j; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ +B; xn----eoc6bm.xn--3nd240h; [B3 B6 V3 V6]; [B3 B6 V3 V6] # ـَّ-.Ⴟ␣ +B; xn----eoc6bm0504a.xn--3nd849e05c; [B3 B6 C1 V6]; [B3 B6 C1 V6] # ـَّ-.Ⴟ␣ +T; 
\uFCF2-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 V3] # ـَّ-.ⴟ␣ +N; \uFCF2-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ +T; \u0D4D-\u200D\u200C。񥞧₅≠; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ +N; \u0D4D-\u200D\u200C。񥞧₅≠; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ +T; \u0D4D-\u200D\u200C。񥞧₅=\u0338; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ +N; \u0D4D-\u200D\u200C。񥞧₅=\u0338; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ +T; \u0D4D-\u200D\u200C。񥞧5≠; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ +N; \u0D4D-\u200D\u200C。񥞧5≠; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ +T; \u0D4D-\u200D\u200C。񥞧5=\u0338; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ +N; \u0D4D-\u200D\u200C。񥞧5=\u0338; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ +B; xn----jmf.xn--5-ufo50192e; [V3 V5 V6]; [V3 V5 V6] # ്-.5≠ +B; xn----jmf215lda.xn--5-ufo50192e; [C1 C2 V5 V6]; [C1 C2 V5 V6] # ്-.5≠ +B; 锣。\u0A4D󠘻󠚆; [P1 V5 V6]; [P1 V5 V6] # 锣.੍ +B; xn--gc5a.xn--ybc83044ppga; [V5 V6]; [V5 V6] # 锣.੍ +T; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ +N; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ +T; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ +N; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ +B; xn--8gb2338k.xn--lhb0154f; \u063D𑈾.\u0649\uA92B; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ +B; \u063D𑈾.\u0649\uA92B; ; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ +B; xn--8gb2338k.xn--lhb603k060h; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ +T; \u0666⁴Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 P1 V6] # ٦4Ⴅ.ࢽ +N; \u0666⁴Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # ٦4Ⴅ.ࢽ +T; \u06664Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 P1 V6] # ٦4Ⴅ.ࢽ +N; \u06664Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # ٦4Ⴅ.ࢽ +T; \u06664ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1] # ٦4ⴅ.ࢽ +N; \u06664ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ +B; xn--4-kqc6770a.xn--jzb; [B1]; [B1] # ٦4ⴅ.ࢽ +B; xn--4-kqc6770a.xn--jzb840j; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ +B; xn--4-kqc489e.xn--jzb; [B1 V6]; [B1 V6] # ٦4Ⴅ.ࢽ +B; xn--4-kqc489e.xn--jzb840j; [B1 B3 C1 V6]; [B1 B3 C1 V6] # ٦4Ⴅ.ࢽ +T; \u0666⁴ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1] # ٦4ⴅ.ࢽ +N; \u0666⁴ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ +T; ჁႱ6\u0318。ß\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ßᬃ +N; ჁႱ6\u0318。ß\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ßᬃ +T; ⴡⴑ6\u0318。ß\u1B03; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ßᬃ +N; ⴡⴑ6\u0318。ß\u1B03; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ +B; ჁႱ6\u0318。SS\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ssᬃ +B; ⴡⴑ6\u0318。ss\u1B03; ⴡⴑ6\u0318.ss\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ +B; Ⴡⴑ6\u0318。Ss\u1B03; [P1 V6]; [P1 V6] # Ⴡⴑ6̘.ssᬃ +B; xn--6-8cb306hms1a.xn--ss-2vq; [V6]; [V6] # Ⴡⴑ6̘.ssᬃ +B; xn--6-8cb7433a2ba.xn--ss-2vq; ⴡⴑ6\u0318.ss\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ +B; ⴡⴑ6\u0318.ss\u1B03; ; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ +B; ჁႱ6\u0318.SS\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ssᬃ +B; Ⴡⴑ6\u0318.Ss\u1B03; [P1 V6]; [P1 V6] # Ⴡⴑ6̘.ssᬃ +B; xn--6-8cb555h2b.xn--ss-2vq; [V6]; [V6] # ჁႱ6̘.ssᬃ +B; xn--6-8cb7433a2ba.xn--zca894k; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ +T; ⴡⴑ6\u0318.ß\u1B03; ; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ßᬃ +N; ⴡⴑ6\u0318.ß\u1B03; ; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ +B; xn--6-8cb555h2b.xn--zca894k; [V6]; [V6] # ჁႱ6̘.ßᬃ +B; 򋡐。≯𑋪; [P1 V6]; [P1 V6] +B; 򋡐。>\u0338𑋪; [P1 V6]; [P1 V6] +B; 򋡐。≯𑋪; [P1 V6]; [P1 V6] +B; 򋡐。>\u0338𑋪; [P1 V6]; [P1 V6] +B; xn--eo08b.xn--hdh3385g; [V6]; [V6] +T; \u065A۲。\u200C-\u1BF3\u08E2; [B1 C1 P1 V5 V6]; [B1 P1 V3 V5 V6] # ٚ۲.-᯳ +N; \u065A۲。\u200C-\u1BF3\u08E2; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ٚ۲.-᯳ +B; 
xn--2hb81a.xn----xrd657l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٚ۲.-᯳ +B; xn--2hb81a.xn----xrd657l30d; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ٚ۲.-᯳ +B; 󠄏𖬴󠲽。\uFFA0; [P1 V5 V6]; [P1 V5 V6] # 𖬴. +B; 󠄏𖬴󠲽。\u1160; [P1 V5 V6]; [P1 V5 V6] # 𖬴. +B; xn--619ep9154c.xn--psd; [V5 V6]; [V5 V6] # 𖬴. +B; xn--619ep9154c.xn--cl7c; [V5 V6]; [V5 V6] # 𖬴. +T; ß⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ß⒈ݠ. +N; ß⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ß⒈ݠ. +T; ß1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ß1.ݠ. +N; ß1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ß1.ݠ. +B; SS1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. +B; ss1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. +B; Ss1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. +B; ss1.xn--kpb6677h.xn--nfb09923ifkyyb; [B2 B3 B5 V6]; [B2 B3 B5 V6] # ss1.ݠ. +B; xn--1-pfa.xn--kpb6677h.xn--nfb09923ifkyyb; [B2 B3 B5 V6]; [B2 B3 B5 V6] # ß1.ݠ. +B; SS⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. +B; ss⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. +B; Ss⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. +B; xn--ss-6ke9690a0g1q.xn--nfb09923ifkyyb; [B5 V6]; [B5 V6] # ss⒈ݠ. +B; xn--zca444a0s1ao12n.xn--nfb09923ifkyyb; [B5 V6]; [B5 V6] # ß⒈ݠ. +B; 󠭔.𐋱₂; [P1 V6]; [P1 V6] +B; 󠭔.𐋱2; [P1 V6]; [P1 V6] +B; xn--vi56e.xn--2-w91i; [V6]; [V6] +T; \u0716\u0947。-ß\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ßڥ +N; \u0716\u0947。-ß\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ßڥ +T; \u0716\u0947。-SS\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ +N; \u0716\u0947。-SS\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ +T; \u0716\u0947。-ss\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ +N; \u0716\u0947。-ss\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ +T; \u0716\u0947。-Ss\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ +N; \u0716\u0947。-Ss\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ +B; xn--gnb63i.xn---ss-4ef; [B1 V3]; [B1 V3] # ܖे.-ssڥ +B; xn--gnb63i.xn---ss-4ef9263a; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ +B; xn--gnb63i.xn----qfa845bhx4a; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ßڥ +T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉Ⴡ +N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉Ⴡ +T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉Ⴡ +N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉Ⴡ +T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉ⴡ +N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉ⴡ +B; xn--pgb911izv33i.xn--i6f270etuy; [B1 V5 V6]; [B1 V5 V6] # ᮩت.᳕䷉ⴡ +B; xn--pgb911imgdrw34r.xn--i6f270etuy; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᮩت.᳕䷉ⴡ +B; xn--pgb911izv33i.xn--5nd792dgv3b; [B1 V5 V6]; [B1 V5 V6] # ᮩت.᳕䷉Ⴡ +B; xn--pgb911imgdrw34r.xn--5nd792dgv3b; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᮩت.᳕䷉Ⴡ +T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉ⴡ +N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉ⴡ +T; \u2DBF.ß\u200D; [C2 P1 V6]; [P1 V6] # .ß +N; \u2DBF.ß\u200D; [C2 P1 V6]; [C2 P1 V6] # .ß +T; \u2DBF.SS\u200D; [C2 P1 V6]; [P1 V6] # .ss +N; \u2DBF.SS\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss +T; \u2DBF.ss\u200D; [C2 P1 V6]; [P1 V6] # .ss +N; \u2DBF.ss\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss +T; \u2DBF.Ss\u200D; [C2 P1 V6]; [P1 V6] # .ss +N; \u2DBF.Ss\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss +B; xn--7pj.ss; [V6]; [V6] # .ss +B; xn--7pj.xn--ss-n1t; [C2 V6]; [C2 V6] # .ss +B; xn--7pj.xn--zca870n; [C2 V6]; [C2 V6] # .ß +B; \u1BF3︒.\u062A≯ꡂ; [B2 B3 B6 
P1 V5 V6]; [B2 B3 B6 P1 V5 V6] # ᯳︒.ت≯ꡂ +B; \u1BF3︒.\u062A>\u0338ꡂ; [B2 B3 B6 P1 V5 V6]; [B2 B3 B6 P1 V5 V6] # ᯳︒.ت≯ꡂ +B; \u1BF3。.\u062A≯ꡂ; [B2 B3 P1 V5 V6 A4_2]; [B2 B3 P1 V5 V6 A4_2] # ᯳..ت≯ꡂ +B; \u1BF3。.\u062A>\u0338ꡂ; [B2 B3 P1 V5 V6 A4_2]; [B2 B3 P1 V5 V6 A4_2] # ᯳..ت≯ꡂ +B; xn--1zf..xn--pgb885lry5g; [B2 B3 V5 V6 A4_2]; [B2 B3 V5 V6 A4_2] # ᯳..ت≯ꡂ +B; xn--1zf8957g.xn--pgb885lry5g; [B2 B3 B6 V5 V6]; [B2 B3 B6 V5 V6] # ᯳︒.ت≯ꡂ +B; ≮≠񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; <\u0338=\u0338񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; ≮≠񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; <\u0338=\u0338񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; xn--1ch1a29470f.xn----7uc5363rc1rn; [B1 V3 V6]; [B1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; 𐹡\u0777。ꡂ; [B1]; [B1] # 𐹡ݷ.ꡂ +B; xn--7pb5275k.xn--bc9a; [B1]; [B1] # 𐹡ݷ.ꡂ +T; Ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ +N; Ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ +T; ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ +N; ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ +B; Ⴉ𝆅񔻅\u0619.SS𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ +B; ⴉ𝆅񔻅\u0619.ss𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ss𐧦𐹳ݵ +B; Ⴉ𝆅񔻅\u0619.Ss𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ +B; xn--7fb125cjv87a7xvz.xn--ss-zme7575xp0e; [B5 B6 V6]; [B5 B6 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ +B; xn--7fb940rwt3z7xvz.xn--ss-zme7575xp0e; [B5 B6 V6]; [B5 B6 V6] # ⴉؙ𝆅.ss𐧦𐹳ݵ +B; xn--7fb940rwt3z7xvz.xn--zca684a699vf2d; [B5 B6 V6]; [B5 B6 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ +B; xn--7fb125cjv87a7xvz.xn--zca684a699vf2d; [B5 B6 V6]; [B5 B6 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ +T; \u200D\u0643𐧾↙.񊽡; [B1 C2 P1 V6]; [B3 P1 V6] # ك𐧾↙. +N; \u200D\u0643𐧾↙.񊽡; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ك𐧾↙. +B; xn--fhb011lnp8n.xn--7s4w; [B3 V6]; [B3 V6] # ك𐧾↙. +B; xn--fhb713k87ag053c.xn--7s4w; [B1 C2 V6]; [B1 C2 V6] # ك𐧾↙. +T; 梉。\u200C; [C1]; xn--7zv. # 梉. +N; 梉。\u200C; [C1]; [C1] # 梉. +B; xn--7zv.; 梉.; xn--7zv. +B; 梉.; ; xn--7zv. +B; xn--7zv.xn--0ug; [C1]; [C1] # 梉. 
+T; ꡣ-≠.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ +N; ꡣ-≠.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ +T; ꡣ-=\u0338.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ +N; ꡣ-=\u0338.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ +T; ꡣ-=\u0338.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +N; ꡣ-=\u0338.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +T; ꡣ-≠.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +N; ꡣ-≠.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +T; ꡣ-≠.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +N; ꡣ-≠.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +T; ꡣ-=\u0338.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +N; ꡣ-=\u0338.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +B; xn----ufo9661d.xn--r5gy929fhm4f; [B2 B3 B6 V6]; [B2 B3 B6 V6] +B; xn----ufo9661d.xn--1ug99cj620c71sh; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ꡣ-≠.𞤹𐅢ↄ +B; xn----ufo9661d.xn--q5g0929fhm4f; [B2 B3 B6 V6]; [B2 B3 B6 V6] +B; xn----ufo9661d.xn--1ug79cm620c71sh; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ꡣ-≠.𞤹𐅢Ↄ +T; ς⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] +N; ς⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] +T; ς9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] +N; ς9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; Σ9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; σ9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; xn--9-zmb.xn--ltj1535k.xn--9,7-r67t; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; xn--9-xmb.xn--ltj1535k.xn--9,7-r67t; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; Σ⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] +B; σ⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] +B; xn--4xa809nwtghi25b.xn--7-075iy877c; [B6 V6]; [B6 V6] +B; xn--3xa019nwtghi25b.xn--7-075iy877c; [B6 V6]; [B6 V6] +T; \u0853.\u200Cß; [B1 C1]; xn--iwb.ss # ࡓ.ß +N; \u0853.\u200Cß; [B1 C1]; [B1 C1] # ࡓ.ß +T; \u0853.\u200Cß; [B1 C1]; xn--iwb.ss # ࡓ.ß +N; \u0853.\u200Cß; [B1 C1]; [B1 C1] # ࡓ.ß +T; \u0853.\u200CSS; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200CSS; [B1 C1]; [B1 C1] # ࡓ.ss +T; \u0853.\u200Css; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200Css; [B1 C1]; [B1 C1] # ࡓ.ss +T; \u0853.\u200CSs; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200CSs; [B1 C1]; [B1 C1] # ࡓ.ss +B; xn--iwb.ss; \u0853.ss; xn--iwb.ss # ࡓ.ss +B; \u0853.ss; ; xn--iwb.ss # ࡓ.ss +B; \u0853.SS; \u0853.ss; xn--iwb.ss # ࡓ.ss +B; \u0853.Ss; \u0853.ss; xn--iwb.ss # ࡓ.ss +B; xn--iwb.xn--ss-i1t; [B1 C1]; [B1 C1] # ࡓ.ss +B; xn--iwb.xn--zca570n; [B1 C1]; [B1 C1] # ࡓ.ß +T; \u0853.\u200CSS; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200CSS; [B1 C1]; [B1 C1] # ࡓ.ss +T; \u0853.\u200Css; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200Css; [B1 C1]; [B1 C1] # ࡓ.ss +T; \u0853.\u200CSs; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200CSs; [B1 C1]; [B1 C1] # ࡓ.ss +T; 񯶣-.\u200D\u074E\uA94D󠻨; [B1 B6 C2 P1 V3 V6]; [B3 B6 P1 V3 V6] # -.ݎꥍ +N; 񯶣-.\u200D\u074E\uA94D󠻨; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # -.ݎꥍ +B; xn----s116e.xn--1ob6504fmf40i; [B3 B6 V3 V6]; [B3 B6 V3 V6] # -.ݎꥍ +B; xn----s116e.xn--1ob387jy90hq459k; [B1 B6 C2 V3 V6]; [B1 B6 C2 V3 V6] # -.ݎꥍ +B; 䃚蟥-。-񽒘⒈; [P1 V3 V6]; [P1 V3 V6] +B; 䃚蟥-。-񽒘1.; [P1 V3 V6]; [P1 V3 V6] +B; xn----n50a258u.xn---1-up07j.; [V3 V6]; [V3 V6] +B; xn----n50a258u.xn----ecp33805f; [V3 V6]; [V3 V6] +B; 𐹸䚵-ꡡ。⺇; [B1]; [B1] +B; xn----bm3an932a1l5d.xn--xvj; [B1]; [B1] +B; 𑄳。\u1ADC𐹻; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑄳.𐹻 +B; xn--v80d.xn--2rf1154i; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𑄳.𐹻 +B; ≮𐹻.⒎𑂵\u06BA\u0602; [B1 P1 V6]; [B1 P1 V6] # ≮𐹻.⒎𑂵ں +B; <\u0338𐹻.⒎𑂵\u06BA\u0602; [B1 P1 V6]; [B1 P1 V6] # ≮𐹻.⒎𑂵ں +B; ≮𐹻.7.𑂵\u06BA\u0602; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮𐹻.7.𑂵ں +B; 
<\u0338𐹻.7.𑂵\u06BA\u0602; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮𐹻.7.𑂵ں +B; xn--gdhx904g.7.xn--kfb18an307d; [B1 V5 V6]; [B1 V5 V6] # ≮𐹻.7.𑂵ں +B; xn--gdhx904g.xn--kfb18a325efm3s; [B1 V6]; [B1 V6] # ≮𐹻.⒎𑂵ں +T; ᢔ≠􋉂.\u200D𐋢; [C2 P1 V6]; [P1 V6] # ᢔ≠.𐋢 +N; ᢔ≠􋉂.\u200D𐋢; [C2 P1 V6]; [C2 P1 V6] # ᢔ≠.𐋢 +T; ᢔ=\u0338􋉂.\u200D𐋢; [C2 P1 V6]; [P1 V6] # ᢔ≠.𐋢 +N; ᢔ=\u0338􋉂.\u200D𐋢; [C2 P1 V6]; [C2 P1 V6] # ᢔ≠.𐋢 +B; xn--ebf031cf7196a.xn--587c; [V6]; [V6] +B; xn--ebf031cf7196a.xn--1ug9540g; [C2 V6]; [C2 V6] # ᢔ≠.𐋢 +B; 𐩁≮񣊛≯.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ +B; 𐩁<\u0338񣊛>\u0338.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ +B; 𐩁≮񣊛≯.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ +B; 𐩁<\u0338񣊛>\u0338.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ +B; xn--gdhc0519o0y27b.xn--lib468q0d21a; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐩁≮≯.٬⳿ +B; -。⺐; [V3]; [V3] +B; -。⺐; [V3]; [V3] +B; -.xn--6vj; [V3]; [V3] +B; 󠰩𑲬.\u065C; [P1 V5 V6]; [P1 V5 V6] # 𑲬.ٜ +B; 󠰩𑲬.\u065C; [P1 V5 V6]; [P1 V5 V6] # 𑲬.ٜ +B; xn--sn3d59267c.xn--4hb; [V5 V6]; [V5 V6] # 𑲬.ٜ +T; 𐍺.񚇃\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # 𐍺. +N; 𐍺.񚇃\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𐍺. +B; xn--ie8c.xn--2g51a; [V5 V6]; [V5 V6] +B; xn--ie8c.xn--0ug03366c; [C1 V5 V6]; [C1 V5 V6] # 𐍺. +B; \u063D\u06E3.𐨎; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ؽۣ.𐨎 +B; xn--8gb64a.xn--mr9c; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ؽۣ.𐨎 +T; 漦Ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +N; 漦Ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +T; 漦ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +N; 漦ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; 漦ႹΣ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; 漦ⴙσ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; 漦Ⴙσ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; xn--4xa947d717e.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] +B; xn--4xa772sl47b.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] +B; xn--3xa972sl47b.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] +B; xn--3xa157d717e.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] +B; 𐹫踧\u0CCD򫚇.󜀃⒈𝨤; [B1 P1 V6]; [B1 P1 V6] # 𐹫踧್.⒈𝨤 +B; 𐹫踧\u0CCD򫚇.󜀃1.𝨤; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𐹫踧್.1.𝨤 +B; xn--8tc1437dro0d6q06h.xn--1-p948l.xn--m82h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𐹫踧್.1.𝨤 +B; xn--8tc1437dro0d6q06h.xn--tsh2611ncu71e; [B1 V6]; [B1 V6] # 𐹫踧್.⒈𝨤 +T; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- +N; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- +T; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- +N; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- +T; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- +N; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- +T; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- +N; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- +B; xn--gdh.xn----cr99a1w710b; [V3 V6]; [V3 V6] +B; xn--1ug95g.xn----cr99a1w710b; [C2 V3 V6]; [C2 V3 V6] # ≮.- +T; \u200D\u200D襔。Ⴜ5ꡮ񵝏; [C2 P1 V6]; [P1 V6] # 襔.Ⴜ5ꡮ +N; \u200D\u200D襔。Ⴜ5ꡮ񵝏; [C2 P1 V6]; [C2 P1 V6] # 襔.Ⴜ5ꡮ +T; \u200D\u200D襔。ⴜ5ꡮ񵝏; [C2 P1 V6]; [P1 V6] # 襔.ⴜ5ꡮ +N; \u200D\u200D襔。ⴜ5ꡮ񵝏; [C2 P1 V6]; [C2 P1 V6] # 襔.ⴜ5ꡮ +B; xn--2u2a.xn--5-uws5848bpf44e; [V6]; [V6] +B; xn--1uga7691f.xn--5-uws5848bpf44e; [C2 V6]; [C2 V6] # 襔.ⴜ5ꡮ +B; xn--2u2a.xn--5-r1g7167ipfw8d; [V6]; [V6] +B; xn--1uga7691f.xn--5-r1g7167ipfw8d; [C2 V6]; [C2 V6] # 襔.Ⴜ5ꡮ +T; 𐫜𑌼\u200D.婀; [B3 C2]; xn--ix9c26l.xn--q0s # 𐫜𑌼.婀 +N; 𐫜𑌼\u200D.婀; [B3 C2]; [B3 C2] # 𐫜𑌼.婀 +T; 𐫜𑌼\u200D.婀; [B3 C2]; xn--ix9c26l.xn--q0s # 𐫜𑌼.婀 +N; 𐫜𑌼\u200D.婀; [B3 C2]; [B3 C2] # 𐫜𑌼.婀 +B; xn--ix9c26l.xn--q0s; 𐫜𑌼.婀; xn--ix9c26l.xn--q0s +B; 𐫜𑌼.婀; ; xn--ix9c26l.xn--q0s +B; xn--1ugx063g1if.xn--q0s; [B3 C2]; [B3 C2] # 𐫜𑌼.婀 +B; 󠅽︒︒𐹯。⬳\u1A78; [B1 P1 V6]; [B1 P1 V6] # ︒︒𐹯.⬳᩸ +B; 󠅽。。𐹯。⬳\u1A78; [B1 A4_2]; [B1 A4_2] # 
..𐹯.⬳᩸ +B; ..xn--no0d.xn--7of309e; [B1 A4_2]; [B1 A4_2] # ..𐹯.⬳᩸ +B; xn--y86ca186j.xn--7of309e; [B1 V6]; [B1 V6] # ︒︒𐹯.⬳᩸ +T; 𝟖ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +N; 𝟖ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +T; 8ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +N; 8ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +T; 8ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +N; 8ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +B; 8SS.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 8ss.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ +B; 8Ss.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 8ss.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 8ss.-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ +B; 8SS.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 8Ss.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; xn--8-qfa.-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +B; XN--8-QFA.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +B; Xn--8-Qfa.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +B; xn--8-qfa.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +T; 𝟖ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +N; 𝟖ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +B; 𝟖SS.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 𝟖ss.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ +B; 𝟖Ss.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +T; -\u200D󠋟.\u200C𐹣Ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𐹣Ⴅ +N; -\u200D󠋟.\u200C𐹣Ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𐹣Ⴅ +T; -\u200D󠋟.\u200C𐹣ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𐹣ⴅ +N; -\u200D󠋟.\u200C𐹣ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𐹣ⴅ +B; xn----s721m.xn--wkj1423e; [B1 V3 V6]; [B1 V3 V6] +B; xn----ugnv7071n.xn--0ugz32cgr0p; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𐹣ⴅ +B; xn----s721m.xn--dnd9201k; [B1 V3 V6]; [B1 V3 V6] +B; xn----ugnv7071n.xn--dnd999e4j4p; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𐹣Ⴅ +T; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 +N; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 +T; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 +N; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 +T; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 +N; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 +T; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 +N; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 +B; xn--0m9as84e2e21c.2; [V5 V6]; [V5 V6] # ꦹ큷.2 +B; xn--1ug1435cfkyaoi04d.2; [C2 V5 V6]; [C2 V5 V6] # ꦹ큷.2 +B; \uDF4D.🄄𞯘; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 +B; \uDF4D.3,𞯘; [B1 P1 V6]; [B1 P1 V6 A3] # .3, +B; \uDF4D.xn--3,-tb22a; [B1 P1 V6]; [B1 P1 V6 A3] # .3, +B; \uDF4D.XN--3,-TB22A; [B1 P1 V6]; [B1 P1 V6 A3] # .3, +B; \uDF4D.Xn--3,-Tb22a; [B1 P1 V6]; [B1 P1 V6 A3] # .3, +B; \uDF4D.xn--3x6hx6f; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 +B; \uDF4D.XN--3X6HX6F; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 +B; \uDF4D.Xn--3X6hx6f; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 +B; 𝨖𐩙。\u06DD󀡶\uA8C5⒈; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𝨖.ꣅ⒈ +B; 𝨖𐩙。\u06DD󀡶\uA8C51.; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𝨖.ꣅ1. +B; xn--rt9cl956a.xn--1-dxc8545j0693i.; [B1 V5 V6]; [B1 V5 V6] # 𝨖.ꣅ1. 
+B; xn--rt9cl956a.xn--tlb403mxv4g06s9i; [B1 V5 V6]; [B1 V5 V6] # 𝨖.ꣅ⒈ +T; 򒈣\u05E1\u06B8。Ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # סڸ.Ⴈ +N; 򒈣\u05E1\u06B8。Ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # סڸ.Ⴈ +T; 򒈣\u05E1\u06B8。ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # סڸ.ⴈ +N; 򒈣\u05E1\u06B8。ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # סڸ.ⴈ +B; xn--meb44b57607c.xn--zkj; [B5 B6 V6]; [B5 B6 V6] # סڸ.ⴈ +B; xn--meb44b57607c.xn--1ug232c; [B5 B6 C2 V6]; [B5 B6 C2 V6] # סڸ.ⴈ +B; xn--meb44b57607c.xn--gnd; [B5 B6 V6]; [B5 B6 V6] # סڸ.Ⴈ +B; xn--meb44b57607c.xn--gnd699e; [B5 B6 C2 V6]; [B5 B6 C2 V6] # סڸ.Ⴈ +T; 󀚶𝨱\u07E6⒈.𑗝髯\u200C; [B1 B5 C1 P1 V5 V6]; [B1 B5 P1 V5 V6] # 𝨱ߦ⒈.𑗝髯 +N; 󀚶𝨱\u07E6⒈.𑗝髯\u200C; [B1 B5 C1 P1 V5 V6]; [B1 B5 C1 P1 V5 V6] # 𝨱ߦ⒈.𑗝髯 +T; 󀚶𝨱\u07E61..𑗝髯\u200C; [B1 B5 C1 P1 V5 V6 A4_2]; [B1 B5 P1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 +N; 󀚶𝨱\u07E61..𑗝髯\u200C; [B1 B5 C1 P1 V5 V6 A4_2]; [B1 B5 C1 P1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 +B; xn--1-idd62296a1fr6e..xn--uj6at43v; [B1 B5 V5 V6 A4_2]; [B1 B5 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 +B; xn--1-idd62296a1fr6e..xn--0ugx259bocxd; [B1 B5 C1 V5 V6 A4_2]; [B1 B5 C1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 +B; xn--etb477lq931a1f58e.xn--uj6at43v; [B1 B5 V5 V6]; [B1 B5 V5 V6] # 𝨱ߦ⒈.𑗝髯 +B; xn--etb477lq931a1f58e.xn--0ugx259bocxd; [B1 B5 C1 V5 V6]; [B1 B5 C1 V5 V6] # 𝨱ߦ⒈.𑗝髯 +B; 𐫀.\u0689𑌀; 𐫀.\u0689𑌀; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 +B; 𐫀.\u0689𑌀; ; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 +B; xn--pw9c.xn--fjb8658k; 𐫀.\u0689𑌀; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 +B; 𑋪.𐳝; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; 𑋪.𐳝; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; 𑋪.𐲝; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; xn--fm1d.xn--5c0d; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; 𑋪.𐲝; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; ≠膣。\u0F83; [P1 V5 V6]; [P1 V5 V6] # ≠膣.ྃ +B; =\u0338膣。\u0F83; [P1 V5 V6]; [P1 V5 V6] # ≠膣.ྃ +B; xn--1chy468a.xn--2ed; [V5 V6]; [V5 V6] # ≠膣.ྃ +T; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß +N; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß +T; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß +N; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß +B; 񰀎-\u077D。SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; 񰀎-\u077D。ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; 񰀎-\u077D。Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; xn----j6c95618k.ss; [B5 B6 V6]; [B5 B6 V6] # -ݽ.ss +B; xn----j6c95618k.xn--zca; [B5 B6 V6]; [B5 B6 V6] # -ݽ.ß +B; 񰀎-\u077D。SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; 񰀎-\u077D。ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; 񰀎-\u077D。Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +T; ς𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +N; ς𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +T; ς𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +N; ς𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; Σ𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; σ𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; xn--4xa656hp23pxmc.xn--es5a888tvjc2u15h; [B5 B6 V6]; [B5 B6 V6] +B; xn--3xa856hp23pxmc.xn--es5a888tvjc2u15h; [B5 B6 V6]; [B5 B6 V6] +B; Σ𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; σ𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +T; 𐋷。\u200D; [C2]; xn--r97c. # 𐋷. +N; 𐋷。\u200D; [C2]; [C2] # 𐋷. +B; xn--r97c.; 𐋷.; xn--r97c.; NV8 +B; 𐋷.; ; xn--r97c.; NV8 +B; xn--r97c.xn--1ug; [C2]; [C2] # 𐋷. 
+B; 𑰳𑈯。⥪; [V5]; [V5] +B; xn--2g1d14o.xn--jti; [V5]; [V5] +T; 𑆀䁴񤧣.Ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.Ⴕ4͈ +N; 𑆀䁴񤧣.Ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.Ⴕ4͈ +T; 𑆀䁴񤧣.Ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.Ⴕ4͈ +N; 𑆀䁴񤧣.Ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.Ⴕ4͈ +T; 𑆀䁴񤧣.ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.ⴕ4͈ +N; 𑆀䁴񤧣.ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.ⴕ4͈ +B; xn--1mnx647cg3x1b.xn--4-zfb5123a; [V5 V6]; [V5 V6] # 𑆀䁴.ⴕ4͈ +B; xn--1mnx647cg3x1b.xn--4-zfb502tlsl; [C1 V5 V6]; [C1 V5 V6] # 𑆀䁴.ⴕ4͈ +B; xn--1mnx647cg3x1b.xn--4-zfb324h; [V5 V6]; [V5 V6] # 𑆀䁴.Ⴕ4͈ +B; xn--1mnx647cg3x1b.xn--4-zfb324h32o; [C1 V5 V6]; [C1 V5 V6] # 𑆀䁴.Ⴕ4͈ +T; 𑆀䁴񤧣.ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.ⴕ4͈ +N; 𑆀䁴񤧣.ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.ⴕ4͈ +T; 憡\uDF1F\u200CႴ.𐋮\u200D≠; [C1 C2 P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.𐋮\u200D≠; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200CႴ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200Cⴔ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ +N; 憡\uDF1F\u200Cⴔ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +T; 憡\uDF1F\u200Cⴔ.𐋮\u200D≠; [C1 C2 P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ +N; 憡\uDF1F\u200Cⴔ.𐋮\u200D≠; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +B; 憡\uDF1Fⴔ.xn--1chz659f; [P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ +B; 憡\uDF1FႴ.XN--1CHZ659F; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1FႴ.xn--1Chz659f; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1FႴ.xn--1chz659f; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200Cⴔ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +N; 憡\uDF1F\u200Cⴔ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +T; 憡\uDF1F\u200CႴ.XN--1UG73GL146A; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.XN--1UG73GL146A; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200CႴ.xn--1Ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.xn--1Ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1FႴ.xn--1ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1Fⴔ.xn--1ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +B; 憡\uDF1FႴ.XN--1UG73GL146A; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1FႴ.xn--1Ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200CႴ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/punycode.rs b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/punycode.rs new file mode 100644 index 000000000..67988e80c --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/punycode.rs @@ -0,0 +1,65 @@ +// Copyright 2013 The rust-url developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
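+//
+// A quick, hedged sketch of the API this vendored test file exercises (this
+// comment block is an added illustration, not part of the upstream rust-url
+// sources): the `idna::punycode` module converts between Unicode text and
+// bare Punycode, with no `xn--` ACE prefix. The sample pair is taken from
+// punycode_tests.json below:
+//
+//     use idna::punycode::{decode, encode_str};
+//     // "bücher" encodes to "bcher-kva"; `decode` yields the chars back.
+//     assert_eq!(encode_str("bücher"), Some("bcher-kva".to_string()));
+//     let decoded: String = decode("bcher-kva").unwrap().into_iter().collect();
+//     assert_eq!(decoded, "bücher");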
+
+use idna::punycode::{decode, encode_str};
+use rustc_serialize::json::{Json, Object};
+use test::TestFn;
+
+fn one_test(decoded: &str, encoded: &str) {
+    match decode(encoded) {
+        None => panic!("Decoding {} failed.", encoded),
+        Some(result) => {
+            let result = result.into_iter().collect::<String>();
+            assert!(result == decoded,
+                    format!("Incorrect decoding of \"{}\":\n \"{}\"\n!= \"{}\"\n",
+                            encoded, result, decoded))
+        }
+    }
+
+    match encode_str(decoded) {
+        None => panic!("Encoding {} failed.", decoded),
+        Some(result) => {
+            assert!(result == encoded,
+                    format!("Incorrect encoding of \"{}\":\n \"{}\"\n!= \"{}\"\n",
+                            decoded, result, encoded))
+        }
+    }
+}
+
+fn get_string<'a>(map: &'a Object, key: &str) -> &'a str {
+    match map.get(&key.to_string()) {
+        Some(&Json::String(ref s)) => s,
+        None => "",
+        _ => panic!(),
+    }
+}
+
+pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) {
+    match Json::from_str(include_str!("punycode_tests.json")) {
+        Ok(Json::Array(tests)) => for (i, test) in tests.into_iter().enumerate() {
+            match test {
+                Json::Object(o) => {
+                    let test_name = {
+                        let desc = get_string(&o, "description");
+                        if desc.is_empty() {
+                            format!("Punycode {}", i + 1)
+                        } else {
+                            format!("Punycode {}: {}", i + 1, desc)
+                        }
+                    };
+                    add_test(test_name, TestFn::dyn_test_fn(move || one_test(
+                        get_string(&o, "decoded"),
+                        get_string(&o, "encoded"),
+                    )))
+                }
+                _ => panic!(),
+            }
+        },
+        other => panic!("{:?}", other)
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/punycode_tests.json b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/punycode_tests.json
new file mode 100644
index 000000000..86785b124
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/punycode_tests.json
@@ -0,0 +1,120 @@
+[
+{
+    "description": "These tests are copied from https://github.com/bestiejs/punycode.js/blob/master/tests/tests.js , used under the MIT license.",
+    "decoded": "",
+    "encoded": ""
+},
+{
+    "description": "a single basic code point",
+    "decoded": "Bach",
+    "encoded": "Bach-"
+},
+{
+    "description": "a single non-ASCII character",
+    "decoded": "\u00FC",
+    "encoded": "tda"
+},
+{
+    "description": "multiple non-ASCII characters",
+    "decoded": "\u00FC\u00EB\u00E4\u00F6\u2665",
+    "encoded": "4can8av2009b"
+},
+{
+    "description": "mix of ASCII and non-ASCII characters",
+    "decoded": "b\u00FCcher",
+    "encoded": "bcher-kva"
+},
+{
+    "description": "long string with both ASCII and non-ASCII characters",
+    "decoded": "Willst du die Bl\u00FCthe des fr\u00FChen, die Fr\u00FCchte des sp\u00E4teren Jahres",
+    "encoded": "Willst du die Blthe des frhen, die Frchte des spteren Jahres-x9e96lkal"
+},
+{
+    "description": "Arabic (Egyptian)",
+    "decoded": "\u0644\u064A\u0647\u0645\u0627\u0628\u062A\u0643\u0644\u0645\u0648\u0634\u0639\u0631\u0628\u064A\u061F",
+    "encoded": "egbpdaj6bu4bxfgehfvwxn"
+},
+{
+    "description": "Chinese (simplified)",
+    "decoded": "\u4ED6\u4EEC\u4E3A\u4EC0\u4E48\u4E0D\u8BF4\u4E2d\u6587",
+    "encoded": "ihqwcrb4cv8a8dqg056pqjye"
+},
+{
+    "description": "Chinese (traditional)",
+    "decoded": "\u4ED6\u5011\u7232\u4EC0\u9EBD\u4E0D\u8AAA\u4E2D\u6587",
+    "encoded": "ihqwctvzc91f659drss3x8bo0yb"
+},
+{
+    "description": "Czech",
+    "decoded": "Pro\u010Dprost\u011Bnemluv\u00ED\u010Desky",
+    "encoded": "Proprostnemluvesky-uyb24dma41a"
+},
+{
+    "description": "Hebrew",
+    "decoded": "\u05DC\u05DE\u05D4\u05D4\u05DD\u05E4\u05E9\u05D5\u05D8\u05DC\u05D0\u05DE\u05D3\u05D1\u05E8\u05D9\u05DD\u05E2\u05D1\u05E8\u05D9\u05EA",
+    "encoded": "4dbcagdahymbxekheh6e0a7fei0b"
+},
+{
+    "description": "Hindi (Devanagari)",
+    "decoded": "\u092F\u0939\u0932\u094B\u0917\u0939\u093F\u0928\u094D\u0926\u0940\u0915\u094D\u092F\u094B\u0902\u0928\u0939\u0940\u0902\u092C\u094B\u0932\u0938\u0915\u0924\u0947\u0939\u0948\u0902",
+    "encoded": "i1baa7eci9glrd9b2ae1bj0hfcgg6iyaf8o0a1dig0cd"
+},
+{
+    "description": "Japanese (kanji and hiragana)",
+    "decoded": "\u306A\u305C\u307F\u3093\u306A\u65E5\u672C\u8A9E\u3092\u8A71\u3057\u3066\u304F\u308C\u306A\u3044\u306E\u304B",
+    "encoded": "n8jok5ay5dzabd5bym9f0cm5685rrjetr6pdxa"
+},
+{
+    "description": "Korean (Hangul syllables)",
+    "decoded": "\uC138\uACC4\uC758\uBAA8\uB4E0\uC0AC\uB78C\uB4E4\uC774\uD55C\uAD6D\uC5B4\uB97C\uC774\uD574\uD55C\uB2E4\uBA74\uC5BC\uB9C8\uB098\uC88B\uC744\uAE4C",
+    "encoded": "989aomsvi5e83db1d2a355cv1e0vak1dwrv93d5xbh15a0dt30a5jpsd879ccm6fea98c"
+},
+{
+    "description": "Russian (Cyrillic)",
+    "decoded": "\u043F\u043E\u0447\u0435\u043C\u0443\u0436\u0435\u043E\u043D\u0438\u043D\u0435\u0433\u043E\u0432\u043E\u0440\u044F\u0442\u043F\u043E\u0440\u0443\u0441\u0441\u043A\u0438",
+    "encoded": "b1abfaaepdrnnbgefbadotcwatmq2g4l"
+},
+{
+    "description": "Spanish",
+    "decoded": "Porqu\u00E9nopuedensimplementehablarenEspa\u00F1ol",
+    "encoded": "PorqunopuedensimplementehablarenEspaol-fmd56a"
+},
+{
+    "description": "Vietnamese",
+    "decoded": "T\u1EA1isaoh\u1ECDkh\u00F4ngth\u1EC3ch\u1EC9n\u00F3iti\u1EBFngVi\u1EC7t",
+    "encoded": "TisaohkhngthchnitingVit-kjcr8268qyxafd2f1b9g"
+},
+{
+    "decoded": "3\u5E74B\u7D44\u91D1\u516B\u5148\u751F",
+    "encoded": "3B-ww4c5e180e575a65lsy2b"
+},
+{
+    "decoded": "\u5B89\u5BA4\u5948\u7F8E\u6075-with-SUPER-MONKEYS",
+    "encoded": "-with-SUPER-MONKEYS-pc58ag80a8qai00g7n9n"
+},
+{
+    "decoded": "Hello-Another-Way-\u305D\u308C\u305E\u308C\u306E\u5834\u6240",
+    "encoded": "Hello-Another-Way--fc4qua05auwb3674vfr0b"
+},
+{
+    "decoded": "\u3072\u3068\u3064\u5C4B\u6839\u306E\u4E0B2",
+    "encoded": "2-u9tlzr9756bt3uc0v"
+},
+{
+    "decoded": "Maji\u3067Koi\u3059\u308B5\u79D2\u524D",
+    "encoded": "MajiKoi5-783gue6qz075azm5e"
+},
+{
+    "decoded": "\u30D1\u30D5\u30A3\u30FCde\u30EB\u30F3\u30D0",
+    "encoded": "de-jg4avhby1noc0d"
+},
+{
+    "decoded": "\u305D\u306E\u30B9\u30D4\u30FC\u30C9\u3067",
+    "encoded": "d9juau41awczczp"
+},
+{
+    "description": "ASCII string that breaks the existing rules for host-name labels (It's not a realistic example for IDNA, because IDNA never encodes pure ASCII labels.)",
+    "decoded": "-> $1.00 <-",
+    "encoded": "-> $1.00 <--"
+}
+]
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/tests.rs b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/tests.rs
new file mode 100644
index 000000000..0a4ad03ee
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/tests.rs
@@ -0,0 +1,25 @@
+extern crate idna;
+extern crate rustc_serialize;
+extern crate test;
+
+mod punycode;
+mod uts46;
+
+fn main() {
+    let mut tests = Vec::new();
+    {
+        let mut add_test = |name, run| {
+            tests.push(test::TestDescAndFn {
+                desc: test::TestDesc {
+                    name: test::DynTestName(name),
+                    ignore: false,
+                    should_panic: test::ShouldPanic::No,
+                },
+                testfn: run,
+            })
+        };
+        punycode::collect_tests(&mut add_test);
+        uts46::collect_tests(&mut add_test);
+    }
+    test::test_main(&std::env::args().collect::<Vec<_>>(), tests)
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/unit.rs b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/unit.rs
new file mode 100644
index 000000000..a7d158d5c
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/unit.rs
@@ -0,0 +1,40 @@
+extern crate idna;
+extern crate unicode_normalization;
+
+use idna::uts46;
+use unicode_normalization::char::is_combining_mark;
+
+
+fn _to_ascii(domain: &str) -> Result<String, uts46::Errors> {
+    uts46::to_ascii(domain, uts46::Flags {
+        transitional_processing: false,
+        use_std3_ascii_rules: true,
+        verify_dns_length: true,
+    })
+}
+
+#[test]
+fn test_v5() {
+    // IdnaTest:784 蔏。𑰺
+    assert!(is_combining_mark('\u{11C3A}'));
+    assert!(_to_ascii("\u{11C3A}").is_err());
+    assert!(_to_ascii("\u{850f}.\u{11C3A}").is_err());
+    assert!(_to_ascii("\u{850f}\u{ff61}\u{11C3A}").is_err());
+}
+
+#[test]
+fn test_v8_bidi_rules() {
+    assert_eq!(_to_ascii("abc").unwrap(), "abc");
+    assert_eq!(_to_ascii("123").unwrap(), "123");
+    assert_eq!(_to_ascii("אבּג").unwrap(), "xn--kdb3bdf");
+    assert_eq!(_to_ascii("ابج").unwrap(), "xn--mgbcm");
+    assert_eq!(_to_ascii("abc.ابج").unwrap(), "abc.xn--mgbcm");
+    assert_eq!(_to_ascii("אבּג.ابج").unwrap(), "xn--kdb3bdf.xn--mgbcm");
+
+    // Bidi domain names cannot start with digits
+    assert!(_to_ascii("0a.\u{05D0}").is_err());
+    assert!(_to_ascii("0à.\u{05D0}").is_err());
+
+    // Bidi chars may be punycode-encoded
+    assert!(_to_ascii("xn--0ca24w").is_err());
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/uts46.rs b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/uts46.rs
new file mode 100644
index 000000000..ddc8af989
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/idna/tests/uts46.rs
@@ -0,0 +1,124 @@
+// Copyright 2013-2014 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::char;
+use idna::uts46;
+use test::TestFn;
+
+pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) {
+    // http://www.unicode.org/Public/idna/latest/IdnaTest.txt
+    for (i, line) in include_str!("IdnaTest.txt").lines().enumerate() {
+        if line == "" || line.starts_with("#") {
+            continue
+        }
+        // Remove comments
+        let mut line = match line.find("#") {
+            Some(index) => &line[0..index],
+            None => line
+        };
+
+        let mut expected_failure = false;
+        if line.starts_with("XFAIL") {
+            expected_failure = true;
+            line = &line[5..line.len()];
+        };
+
+        let mut pieces = line.split(';').map(|x| x.trim()).collect::<Vec<&str>>();
+
+        let test_type = pieces.remove(0);
+        let original = pieces.remove(0);
+        let source = unescape(original);
+        let to_unicode = pieces.remove(0);
+        let to_ascii = pieces.remove(0);
+        let nv8 = if pieces.len() > 0 { pieces.remove(0) } else { "" };
+
+        if expected_failure {
+            continue;
+        }
+
+        let test_name = format!("UTS #46 line {}", i + 1);
+        add_test(test_name, TestFn::dyn_test_fn(move || {
+            let result = uts46::to_ascii(&source, uts46::Flags {
+                use_std3_ascii_rules: true,
+                transitional_processing: test_type == "T",
+                verify_dns_length: true,
+            });
+
+            if to_ascii.starts_with("[") {
+                if to_ascii.starts_with("[C") {
+                    // http://unicode.org/reports/tr46/#Deviations
+                    // applications that perform IDNA2008 lookup are not required to check
+                    // for these contexts
+                    return;
+                }
+                if to_ascii == "[V2]" {
+                    // Everybody ignores V2
+                    // https://github.com/servo/rust-url/pull/240
+                    // https://github.com/whatwg/url/issues/53#issuecomment-181528158
+                    // http://www.unicode.org/review/pri317/
+                    return;
+                }
+                let res = result.ok();
+                assert!(res == None, "Expected error. result: {} | original: {} | source: {}",
+                        res.unwrap(), original, source);
+                return;
+            }
+
+            let to_ascii = if to_ascii.len() > 0 {
+                to_ascii.to_string()
+            } else {
+                if to_unicode.len() > 0 {
+                    to_unicode.to_string()
+                } else {
+                    source.clone()
+                }
+            };
+
+            if nv8 == "NV8" {
+                // This result isn't valid under IDNA2008. Skip it
+                return;
+            }
+
+            assert!(result.is_ok(), "Couldn't parse {} | original: {} | error: {:?}",
+                    source, original, result.err());
+            let output = result.ok().unwrap();
+            assert!(output == to_ascii, "result: {} | expected: {} | original: {} | source: {}",
+                    output, to_ascii, original, source);
+        }))
+    }
+}
+
+fn unescape(input: &str) -> String {
+    let mut output = String::new();
+    let mut chars = input.chars();
+    loop {
+        match chars.next() {
+            None => return output,
+            Some(c) =>
+                if c == '\\' {
+                    match chars.next().unwrap() {
+                        '\\' => output.push('\\'),
+                        'u' => {
+                            let c1 = chars.next().unwrap().to_digit(16).unwrap();
+                            let c2 = chars.next().unwrap().to_digit(16).unwrap();
+                            let c3 = chars.next().unwrap().to_digit(16).unwrap();
+                            let c4 = chars.next().unwrap().to_digit(16).unwrap();
+                            match char::from_u32((((c1 * 16 + c2) * 16 + c3) * 16 + c4))
+                            {
+                                Some(c) => output.push(c),
+                                None => { output.push_str(&format!("\\u{:X}{:X}{:X}{:X}",c1,c2,c3,c4)); }
+                            };
+                        }
+                        _ => panic!("Invalid test data input"),
+                    }
+                } else {
+                    output.push(c);
+                }
+        }
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/percent_encoding/Cargo.toml b/collector/compile-benchmarks/cargo/url-1.5.1/percent_encoding/Cargo.toml
new file mode 100644
index 000000000..0f93ffc2f
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/percent_encoding/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "percent-encoding"
+version = "1.0.0"
+authors = ["The rust-url developers"]
+description = "Percent encoding and decoding"
+repository = "https://github.com/servo/rust-url/"
+license = "MIT/Apache-2.0"
+
+[lib]
+doctest = false
+test = false
+path = "lib.rs"
+
+[dev-dependencies]
+rustc-test = "0.1"
+rustc-serialize = "0.3"
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/percent_encoding/lib.rs b/collector/compile-benchmarks/cargo/url-1.5.1/percent_encoding/lib.rs
new file mode 100644
index 000000000..16d37ada6
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/percent_encoding/lib.rs
@@ -0,0 +1,442 @@
+// Copyright 2013-2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! URLs use special chacters to indicate the parts of the request. For example, a forward slash
+//! indicates a path. In order for that charcter to exist outside of a path separator, that
+//! charcter would need to be encoded.
+//!
+//! Percent encoding replaces reserved charcters with the `%` escape charcter followed by hexidecimal
+//! ASCII representaton. For non-ASCII charcters that are percent encoded, a UTF-8 byte sequence
+//! becomes percent encoded. A simple example can be seen when the space literal is replaced with
+//! `%20`.
+//!
+//! Percent encoding is further complicated by the fact that different parts of an URL have
+//! different encoding requirements. In order to support the variety of encoding requirements,
+//! `url::percent_encoding` includes different *encode sets*.
+//! See [URL Standard](https://url.spec.whatwg.org/#percent-encoded-bytes) for details.
+//!
+//! This module provides some `*_ENCODE_SET` constants.
+//! If a different set is required, it can be created with
+//! the [`define_encode_set!`](../macro.define_encode_set!.html) macro.
+//!
+//! # Examples
+//!
+//! ```
+//! use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
+//!
+//! assert_eq!(utf8_percent_encode("foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
+//! ```
+
+use std::ascii::AsciiExt;
+use std::borrow::Cow;
+use std::fmt;
+use std::slice;
+use std::str;
+
+/// Represents a set of characters / bytes that should be percent-encoded.
+///
+/// See [encode sets specification](http://url.spec.whatwg.org/#simple-encode-set).
+///
+/// Different characters need to be encoded in different parts of an URL.
+/// For example, a literal `?` question mark in an URL’s path would indicate
+/// the start of the query string.
+/// A question mark meant to be part of the path therefore needs to be percent-encoded.
+/// In the query string however, a question mark does not have any special meaning
+/// and does not need to be percent-encoded.
+///
+/// A few sets are defined in this module.
+/// Use the [`define_encode_set!`](../macro.define_encode_set!.html) macro to define different ones.
+pub trait EncodeSet: Clone {
+    /// Called with UTF-8 bytes rather than code points.
+    /// Should return true for all non-ASCII bytes.
+    fn contains(&self, byte: u8) -> bool;
+}
+
+/// Define a new struct
+/// that implements the [`EncodeSet`](percent_encoding/trait.EncodeSet.html) trait,
+/// for use in [`percent_decode()`](percent_encoding/fn.percent_encode.html)
+/// and related functions.
+///
+/// Parameters are characters to include in the set in addition to those of the base set.
+/// See [encode sets specification](http://url.spec.whatwg.org/#simple-encode-set).
+///
+/// Example
+/// =======
+///
+/// ```rust
+/// #[macro_use] extern crate percent_encoding;
+/// use percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET};
+/// define_encode_set! {
+///     /// This encode set is used in the URL parser for query strings.
+///     pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'}
+/// }
+/// # fn main() {
+/// assert_eq!(utf8_percent_encode("foo bar", QUERY_ENCODE_SET).collect::<String>(), "foo%20bar");
+/// # }
+/// ```
+#[macro_export]
+macro_rules! define_encode_set {
+    ($(#[$attr: meta])* pub $name: ident = [$base_set: expr] | {$($ch: pat),*}) => {
+        $(#[$attr])*
+        #[derive(Copy, Clone, Debug)]
+        #[allow(non_camel_case_types)]
+        pub struct $name;
+
+        impl $crate::EncodeSet for $name {
+            #[inline]
+            fn contains(&self, byte: u8) -> bool {
+                match byte as char {
+                    $(
+                        $ch => true,
+                    )*
+                    _ => $base_set.contains(byte)
+                }
+            }
+        }
+    }
+}
+
+/// This encode set is used for the path of cannot-be-a-base URLs.
+///
+/// All ASCII charcters less than hexidecimal 20 and greater than 7E are encoded. This includes
+/// special charcters such as line feed, carriage return, NULL, etc.
+#[derive(Copy, Clone, Debug)]
+#[allow(non_camel_case_types)]
+pub struct SIMPLE_ENCODE_SET;
+
+impl EncodeSet for SIMPLE_ENCODE_SET {
+    #[inline]
+    fn contains(&self, byte: u8) -> bool {
+        byte < 0x20 || byte > 0x7E
+    }
+}
+
+define_encode_set! {
+    /// This encode set is used in the URL parser for query strings.
+    ///
+    /// Aside from special chacters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
+    /// space, double quote ("), hash (#), and inequality qualifiers (<), (>) are encoded.
+    pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'}
+}
+
+define_encode_set! {
+    /// This encode set is used for path components.
+    ///
+    /// Aside from special chacters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
+    /// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
+    /// question mark (?), and curly brackets ({), (}) are encoded.
+    pub DEFAULT_ENCODE_SET = [QUERY_ENCODE_SET] | {'`', '?', '{', '}'}
+}
+
+define_encode_set! {
+    /// This encode set is used for on '/'-separated path segment
+    ///
+    /// Aside from special chacters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
+    /// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
+    /// question mark (?), and curly brackets ({), (}), percent sign (%), forward slash (/) are
+    /// encoded.
+    pub PATH_SEGMENT_ENCODE_SET = [DEFAULT_ENCODE_SET] | {'%', '/'}
+}
+
+define_encode_set! {
+    /// This encode set is used for username and password.
+    ///
+    /// Aside from special chacters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
+    /// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
+    /// question mark (?), and curly brackets ({), (}), forward slash (/), colon (:), semi-colon (;),
+    /// equality (=), at (@), backslash (\\), square brackets ([), (]), caret (\^), and pipe (|) are
+    /// encoded.
+    pub USERINFO_ENCODE_SET = [DEFAULT_ENCODE_SET] | {
+        '/', ':', ';', '=', '@', '[', '\\', ']', '^', '|'
+    }
+}
+
+/// Return the percent-encoding of the given bytes.
+///
+/// This is unconditional, unlike `percent_encode()` which uses an encode set.
+///
+/// # Examples
+///
+/// ```
+/// use url::percent_encoding::percent_encode_byte;
+///
+/// assert_eq!("foo bar".bytes().map(percent_encode_byte).collect::<String>(),
+///            "%66%6F%6F%20%62%61%72");
+/// ```
+pub fn percent_encode_byte(byte: u8) -> &'static str {
+    let index = usize::from(byte) * 3;
+    &"\
+    %00%01%02%03%04%05%06%07%08%09%0A%0B%0C%0D%0E%0F\
+    %10%11%12%13%14%15%16%17%18%19%1A%1B%1C%1D%1E%1F\
+    %20%21%22%23%24%25%26%27%28%29%2A%2B%2C%2D%2E%2F\
+    %30%31%32%33%34%35%36%37%38%39%3A%3B%3C%3D%3E%3F\
+    %40%41%42%43%44%45%46%47%48%49%4A%4B%4C%4D%4E%4F\
+    %50%51%52%53%54%55%56%57%58%59%5A%5B%5C%5D%5E%5F\
+    %60%61%62%63%64%65%66%67%68%69%6A%6B%6C%6D%6E%6F\
+    %70%71%72%73%74%75%76%77%78%79%7A%7B%7C%7D%7E%7F\
+    %80%81%82%83%84%85%86%87%88%89%8A%8B%8C%8D%8E%8F\
+    %90%91%92%93%94%95%96%97%98%99%9A%9B%9C%9D%9E%9F\
+    %A0%A1%A2%A3%A4%A5%A6%A7%A8%A9%AA%AB%AC%AD%AE%AF\
+    %B0%B1%B2%B3%B4%B5%B6%B7%B8%B9%BA%BB%BC%BD%BE%BF\
+    %C0%C1%C2%C3%C4%C5%C6%C7%C8%C9%CA%CB%CC%CD%CE%CF\
+    %D0%D1%D2%D3%D4%D5%D6%D7%D8%D9%DA%DB%DC%DD%DE%DF\
+    %E0%E1%E2%E3%E4%E5%E6%E7%E8%E9%EA%EB%EC%ED%EE%EF\
+    %F0%F1%F2%F3%F4%F5%F6%F7%F8%F9%FA%FB%FC%FD%FE%FF\
+    "[index..index + 3]
+}
+
+/// Percent-encode the given bytes with the given encode set.
+///
+/// The encode set define which bytes (in addition to non-ASCII and controls)
+/// need to be percent-encoded.
+/// The choice of this set depends on context.
+/// For example, `?` needs to be encoded in an URL path but not in a query string.
+///
+/// The return value is an iterator of `&str` slices (so it has a `.collect::<String>()` method)
+/// that also implements `Display` and `Into<Cow<str>>`.
+/// The latter returns `Cow::Borrowed` when none of the bytes in `input`
+/// are in the given encode set.
+///
+/// # Examples
+///
+/// ```
+/// use url::percent_encoding::{percent_encode, DEFAULT_ENCODE_SET};
+///
+/// assert_eq!(percent_encode(b"foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
+/// ```
+#[inline]
+pub fn percent_encode<E: EncodeSet>(input: &[u8], encode_set: E) -> PercentEncode<E> {
+    PercentEncode {
+        bytes: input,
+        encode_set: encode_set,
+    }
+}
+
+/// Percent-encode the UTF-8 encoding of the given string.
+///
+/// See `percent_encode()` for how to use the return value.
+///
+/// # Examples
+///
+/// ```
+/// use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
+///
+/// assert_eq!(utf8_percent_encode("foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
+/// ```
+#[inline]
+pub fn utf8_percent_encode<E: EncodeSet>(input: &str, encode_set: E) -> PercentEncode<E> {
+    percent_encode(input.as_bytes(), encode_set)
+}
+
+/// The return type of `percent_encode()` and `utf8_percent_encode()`.
+#[derive(Clone, Debug)]
+pub struct PercentEncode<'a, E: EncodeSet> {
+    bytes: &'a [u8],
+    encode_set: E,
+}
+
+impl<'a, E: EncodeSet> Iterator for PercentEncode<'a, E> {
+    type Item = &'a str;
+
+    fn next(&mut self) -> Option<&'a str> {
+        if let Some((&first_byte, remaining)) = self.bytes.split_first() {
+            if self.encode_set.contains(first_byte) {
+                self.bytes = remaining;
+                Some(percent_encode_byte(first_byte))
+            } else {
+                assert!(first_byte.is_ascii());
+                for (i, &byte) in remaining.iter().enumerate() {
+                    if self.encode_set.contains(byte) {
+                        // 1 for first_byte + i for previous iterations of this loop
+                        let (unchanged_slice, remaining) = self.bytes.split_at(1 + i);
+                        self.bytes = remaining;
+                        return Some(unsafe { str::from_utf8_unchecked(unchanged_slice) })
+                    } else {
+                        assert!(byte.is_ascii());
+                    }
+                }
+                let unchanged_slice = self.bytes;
+                self.bytes = &[][..];
+                Some(unsafe { str::from_utf8_unchecked(unchanged_slice) })
+            }
+        } else {
+            None
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        if self.bytes.is_empty() {
+            (0, Some(0))
+        } else {
+            (1, Some(self.bytes.len()))
+        }
+    }
+}
+
+impl<'a, E: EncodeSet> fmt::Display for PercentEncode<'a, E> {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        for c in (*self).clone() {
+            formatter.write_str(c)?
+        }
+        Ok(())
+    }
+}
+
+impl<'a, E: EncodeSet> From<PercentEncode<'a, E>> for Cow<'a, str> {
+    fn from(mut iter: PercentEncode<'a, E>) -> Self {
+        match iter.next() {
+            None => "".into(),
+            Some(first) => {
+                match iter.next() {
+                    None => first.into(),
+                    Some(second) => {
+                        let mut string = first.to_owned();
+                        string.push_str(second);
+                        string.extend(iter);
+                        string.into()
+                    }
+                }
+            }
+        }
+    }
+}
+
+/// Percent-decode the given bytes.
+///
+/// The return value is an iterator of decoded `u8` bytes
+/// that also implements `Into<Cow<[u8]>>`
+/// (which returns `Cow::Borrowed` when `input` contains no percent-encoded sequence)
+/// and has `decode_utf8()` and `decode_utf8_lossy()` methods.
+///
+/// # Examples
+///
+/// ```
+/// use url::percent_encoding::percent_decode;
+///
+/// assert_eq!(percent_decode(b"foo%20bar%3F").decode_utf8().unwrap(), "foo bar?");
+/// ```
+#[inline]
+pub fn percent_decode(input: &[u8]) -> PercentDecode {
+    PercentDecode {
+        bytes: input.iter()
+    }
+}
+
+/// The return type of `percent_decode()`.
+#[derive(Clone, Debug)]
+pub struct PercentDecode<'a> {
+    bytes: slice::Iter<'a, u8>,
+}
+
+fn after_percent_sign(iter: &mut slice::Iter<u8>) -> Option<u8> {
+    let initial_iter = iter.clone();
+    let h = iter.next().and_then(|&b| (b as char).to_digit(16));
+    let l = iter.next().and_then(|&b| (b as char).to_digit(16));
+    if let (Some(h), Some(l)) = (h, l) {
+        Some(h as u8 * 0x10 + l as u8)
+    } else {
+        *iter = initial_iter;
+        None
+    }
+}
+
+impl<'a> Iterator for PercentDecode<'a> {
+    type Item = u8;
+
+    fn next(&mut self) -> Option<u8> {
+        self.bytes.next().map(|&byte| {
+            if byte == b'%' {
+                after_percent_sign(&mut self.bytes).unwrap_or(byte)
+            } else {
+                byte
+            }
+        })
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let bytes = self.bytes.len();
+        (bytes / 3, Some(bytes))
+    }
+}
+
+impl<'a> From<PercentDecode<'a>> for Cow<'a, [u8]> {
+    fn from(iter: PercentDecode<'a>) -> Self {
+        match iter.if_any() {
+            Some(vec) => Cow::Owned(vec),
+            None => Cow::Borrowed(iter.bytes.as_slice()),
+        }
+    }
+}
+
+impl<'a> PercentDecode<'a> {
+    /// If the percent-decoding is different from the input, return it as a new bytes vector.
+    pub fn if_any(&self) -> Option<Vec<u8>> {
+        let mut bytes_iter = self.bytes.clone();
+        while bytes_iter.any(|&b| b == b'%') {
+            if let Some(decoded_byte) = after_percent_sign(&mut bytes_iter) {
+                let initial_bytes = self.bytes.as_slice();
+                let unchanged_bytes_len = initial_bytes.len() - bytes_iter.len() - 3;
+                let mut decoded = initial_bytes[..unchanged_bytes_len].to_owned();
+                decoded.push(decoded_byte);
+                decoded.extend(PercentDecode {
+                    bytes: bytes_iter
+                });
+                return Some(decoded)
+            }
+        }
+        // Nothing to decode
+        None
+    }
+
+    /// Decode the result of percent-decoding as UTF-8.
+    ///
+    /// This is return `Err` when the percent-decoded bytes are not well-formed in UTF-8.
+    pub fn decode_utf8(self) -> Result<Cow<'a, str>, str::Utf8Error> {
+        match self.clone().into() {
+            Cow::Borrowed(bytes) => {
+                match str::from_utf8(bytes) {
+                    Ok(s) => Ok(s.into()),
+                    Err(e) => Err(e),
+                }
+            }
+            Cow::Owned(bytes) => {
+                match String::from_utf8(bytes) {
+                    Ok(s) => Ok(s.into()),
+                    Err(e) => Err(e.utf8_error()),
+                }
+            }
+        }
+    }
+
+    /// Decode the result of percent-decoding as UTF-8, lossily.
+    ///
+    /// Invalid UTF-8 percent-encoded byte sequences will be replaced � U+FFFD,
+    /// the replacement character.
+    pub fn decode_utf8_lossy(self) -> Cow<'a, str> {
+        decode_utf8_lossy(self.clone().into())
+    }
+}
+
+fn decode_utf8_lossy(input: Cow<[u8]>) -> Cow<str> {
+    match input {
+        Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
+        Cow::Owned(bytes) => {
+            let raw_utf8: *const [u8];
+            match String::from_utf8_lossy(&bytes) {
+                Cow::Borrowed(utf8) => raw_utf8 = utf8.as_bytes(),
+                Cow::Owned(s) => return s.into(),
+            }
+            // from_utf8_lossy returned a borrow of `bytes` unchanged.
+            debug_assert!(raw_utf8 == &*bytes as *const [u8]);
+            // Reuse the existing `Vec` allocation.
+            unsafe { String::from_utf8_unchecked(bytes) }.into()
+        }
+    }
+}
+
+
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/rust-url-todo b/collector/compile-benchmarks/cargo/url-1.5.1/rust-url-todo
new file mode 100644
index 000000000..6aeefbf13
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/rust-url-todo
@@ -0,0 +1,14 @@
+* standalone path parsing?
+* Test setters
+  * Test trim C0/space
+  * Test remove tab & newline
+
+
+
+#[test]
+fn test_path_segments() {
+    let mut url = Url::parse("http://example.net").unwrap();
+    url.push_path_segment("foo").unwrap();
+    url.extend_path_segments(&["bar", "b/az"]).unwrap();
+    assert_eq!(url.as_str(), "http://example.net/foo");
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/encoding.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/encoding.rs
new file mode 100644
index 000000000..920b30e11
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/encoding.rs
@@ -0,0 +1,146 @@
+// Copyright 2013-2014 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+//! Abstraction that conditionally compiles either to rust-encoding,
+//! or to only support UTF-8.
+
+#[cfg(feature = "query_encoding")] extern crate encoding;
+
+use std::borrow::Cow;
+#[cfg(feature = "query_encoding")] use std::fmt::{self, Debug, Formatter};
+
+#[cfg(feature = "query_encoding")] use self::encoding::types::{DecoderTrap, EncoderTrap};
+#[cfg(feature = "query_encoding")] use self::encoding::label::encoding_from_whatwg_label;
+#[cfg(feature = "query_encoding")] pub use self::encoding::types::EncodingRef;
+
+#[cfg(feature = "query_encoding")]
+#[derive(Copy, Clone)]
+pub struct EncodingOverride {
+    /// `None` means UTF-8.
+    encoding: Option<EncodingRef>
+}
+
+#[cfg(feature = "query_encoding")]
+impl EncodingOverride {
+    pub fn from_opt_encoding(encoding: Option<EncodingRef>) -> Self {
+        encoding.map(Self::from_encoding).unwrap_or_else(Self::utf8)
+    }
+
+    pub fn from_encoding(encoding: EncodingRef) -> Self {
+        EncodingOverride {
+            encoding: if encoding.name() == "utf-8" { None } else { Some(encoding) }
+        }
+    }
+
+    #[inline]
+    pub fn utf8() -> Self {
+        EncodingOverride { encoding: None }
+    }
+
+    pub fn lookup(label: &[u8]) -> Option<Self> {
+        // Don't use String::from_utf8_lossy since no encoding label contains U+FFFD
+        // https://encoding.spec.whatwg.org/#names-and-labels
+        ::std::str::from_utf8(label)
+            .ok()
+            .and_then(encoding_from_whatwg_label)
+            .map(Self::from_encoding)
+    }
+
+    /// https://encoding.spec.whatwg.org/#get-an-output-encoding
+    pub fn to_output_encoding(self) -> Self {
+        if let Some(encoding) = self.encoding {
+            if matches!(encoding.name(), "utf-16le" | "utf-16be") {
+                return Self::utf8()
+            }
+        }
+        self
+    }
+
+    pub fn is_utf8(&self) -> bool {
+        self.encoding.is_none()
+    }
+
+    pub fn name(&self) -> &'static str {
+        match self.encoding {
+            Some(encoding) => encoding.name(),
+            None => "utf-8",
+        }
+    }
+
+    pub fn decode<'a>(&self, input: Cow<'a, [u8]>) -> Cow<'a, str> {
+        match self.encoding {
+            // `encoding.decode` never returns `Err` when called with `DecoderTrap::Replace`
+            Some(encoding) => encoding.decode(&input, DecoderTrap::Replace).unwrap().into(),
+            None => decode_utf8_lossy(input),
+        }
+    }
+
+    pub fn encode<'a>(&self, input: Cow<'a, str>) -> Cow<'a, [u8]> {
+        match self.encoding {
+            // `encoding.encode` never returns `Err` when called with `EncoderTrap::NcrEscape`
+            Some(encoding) => Cow::Owned(encoding.encode(&input, EncoderTrap::NcrEscape).unwrap()),
+            None => encode_utf8(input)
+        }
+    }
+}
+
+#[cfg(feature = "query_encoding")]
+impl Debug for EncodingOverride {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(f, "EncodingOverride {{ encoding: ")?;
+        match self.encoding {
+            Some(e) => write!(f, "{} }}", e.name()),
+            None => write!(f, "None }}")
+        }
+    }
+}
+
+#[cfg(not(feature = "query_encoding"))]
+#[derive(Copy, Clone, Debug)]
+pub struct EncodingOverride;
+
+#[cfg(not(feature = "query_encoding"))]
+impl EncodingOverride {
+    #[inline]
+    pub fn utf8() -> Self {
+        EncodingOverride
+    }
+
+    pub fn decode<'a>(&self, input: Cow<'a, [u8]>) -> Cow<'a, str> {
+        decode_utf8_lossy(input)
+    }
+
+    pub fn encode<'a>(&self, input: Cow<'a, str>) -> Cow<'a, [u8]> {
+        encode_utf8(input)
+    }
+}
+
+pub fn decode_utf8_lossy(input: Cow<[u8]>) -> Cow<str> {
+    match input {
+        Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
+        Cow::Owned(bytes) => {
+            let raw_utf8: *const [u8];
+            match String::from_utf8_lossy(&bytes) {
+                Cow::Borrowed(utf8) => raw_utf8 = utf8.as_bytes(),
+                Cow::Owned(s) => return s.into(),
+            }
+            // from_utf8_lossy returned a borrow of `bytes` unchanged.
+            debug_assert!(raw_utf8 == &*bytes as *const [u8]);
+            // Reuse the existing `Vec` allocation.
+            unsafe { String::from_utf8_unchecked(bytes) }.into()
+        }
+    }
+}
+
+pub fn encode_utf8(input: Cow<str>) -> Cow<[u8]> {
+    match input {
+        Cow::Borrowed(s) => Cow::Borrowed(s.as_bytes()),
+        Cow::Owned(s) => Cow::Owned(s.into_bytes())
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/form_urlencoded.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/form_urlencoded.rs
new file mode 100644
index 000000000..3d0931a5a
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/form_urlencoded.rs
@@ -0,0 +1,369 @@
+// Copyright 2013-2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Parser and serializer for the [`application/x-www-form-urlencoded` syntax](
+//! http://url.spec.whatwg.org/#application/x-www-form-urlencoded),
+//! as used by HTML forms.
+//!
+//! Converts between a string (such as an URL’s query string)
+//! and a sequence of (name, value) pairs.
+
+use encoding::EncodingOverride;
+use percent_encoding::{percent_encode_byte, percent_decode};
+use std::borrow::{Borrow, Cow};
+use std::str;
+
+
+/// Convert a byte string in the `application/x-www-form-urlencoded` syntax
+/// into a iterator of (name, value) pairs.
+///
+/// Use `parse(input.as_bytes())` to parse a `&str` string.
+///
+/// The names and values are percent-decoded. For instance, `%23first=%25try%25` will be
+/// converted to `[("#first", "%try%")]`.
+#[inline]
+pub fn parse(input: &[u8]) -> Parse {
+    Parse {
+        input: input,
+        encoding: EncodingOverride::utf8(),
+    }
+}
+
+
+/// Convert a byte string in the `application/x-www-form-urlencoded` syntax
+/// into a iterator of (name, value) pairs.
+///
+/// Use `parse(input.as_bytes())` to parse a `&str` string.
+///
+/// This function is only available if the `query_encoding`
+/// [feature](http://doc.crates.io/manifest.html#the-features-section]) is enabled.
+///
+/// Arguments:
+///
+/// * `encoding_override`: The character encoding each name and values is decoded as
+///    after percent-decoding. Defaults to UTF-8.
+///    `EncodingRef` is defined in [rust-encoding](https://github.com/lifthrasiir/rust-encoding).
+/// * `use_charset`: The *use _charset_ flag*. If in doubt, set to `false`.
+#[cfg(feature = "query_encoding")]
+pub fn parse_with_encoding<'a>(input: &'a [u8],
+                               encoding_override: Option<::encoding::EncodingRef>,
+                               use_charset: bool)
+                               -> Result<Parse<'a>, ()> {
+    use std::ascii::AsciiExt;
+
+    let mut encoding = EncodingOverride::from_opt_encoding(encoding_override);
+    if !(encoding.is_utf8() || input.is_ascii()) {
+        return Err(())
+    }
+    if use_charset {
+        for sequence in input.split(|&b| b == b'&') {
+            // No '+' in "_charset_" to replace with ' '.
+            if sequence.starts_with(b"_charset_=") {
+                let value = &sequence[b"_charset_=".len()..];
+                // Skip replacing '+' with ' ' in value since no encoding label contains either:
+                // https://encoding.spec.whatwg.org/#names-and-labels
+                if let Some(e) = EncodingOverride::lookup(value) {
+                    encoding = e;
+                    break
+                }
+            }
+        }
+    }
+    Ok(Parse {
+        input: input,
+        encoding: encoding,
+    })
+}
+
+/// The return type of `parse()`.
+#[derive(Copy, Clone, Debug)]
+pub struct Parse<'a> {
+    input: &'a [u8],
+    encoding: EncodingOverride,
+}
+
+impl<'a> Iterator for Parse<'a> {
+    type Item = (Cow<'a, str>, Cow<'a, str>);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        loop {
+            if self.input.is_empty() {
+                return None
+            }
+            let mut split2 = self.input.splitn(2, |&b| b == b'&');
+            let sequence = split2.next().unwrap();
+            self.input = split2.next().unwrap_or(&[][..]);
+            if sequence.is_empty() {
+                continue
+            }
+            let mut split2 = sequence.splitn(2, |&b| b == b'=');
+            let name = split2.next().unwrap();
+            let value = split2.next().unwrap_or(&[][..]);
+            return Some((
+                decode(name, self.encoding),
+                decode(value, self.encoding),
+            ))
+        }
+    }
+}
+
+fn decode(input: &[u8], encoding: EncodingOverride) -> Cow<str> {
+    let replaced = replace_plus(input);
+    encoding.decode(match percent_decode(&replaced).if_any() {
+        Some(vec) => Cow::Owned(vec),
+        None => replaced,
+    })
+}
+
+/// Replace b'+' with b' '
+fn replace_plus(input: &[u8]) -> Cow<[u8]> {
+    match input.iter().position(|&b| b == b'+') {
+        None => Cow::Borrowed(input),
+        Some(first_position) => {
+            let mut replaced = input.to_owned();
+            replaced[first_position] = b' ';
+            for byte in &mut replaced[first_position + 1..] {
+                if *byte == b'+' {
+                    *byte = b' ';
+                }
+            }
+            Cow::Owned(replaced)
+        }
+    }
+}
+
+impl<'a> Parse<'a> {
+    /// Return a new iterator that yields pairs of `String` instead of pairs of `Cow`.
+    pub fn into_owned(self) -> ParseIntoOwned<'a> {
+        ParseIntoOwned { inner: self }
+    }
+}
+
+/// Like `Parse`, but yields pairs of `String` instead of pairs of `Cow`.
+#[derive(Debug)]
+pub struct ParseIntoOwned<'a> {
+    inner: Parse<'a>
+}
+
+impl<'a> Iterator for ParseIntoOwned<'a> {
+    type Item = (String, String);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.inner.next().map(|(k, v)| (k.into_owned(), v.into_owned()))
+    }
+}
+
+/// The [`application/x-www-form-urlencoded` byte serializer](
+/// https://url.spec.whatwg.org/#concept-urlencoded-byte-serializer).
+///
+/// Return an iterator of `&str` slices.
+pub fn byte_serialize(input: &[u8]) -> ByteSerialize {
+    ByteSerialize {
+        bytes: input,
+    }
+}
+
+/// Return value of `byte_serialize()`.
+#[derive(Debug)]
+pub struct ByteSerialize<'a> {
+    bytes: &'a [u8],
+}
+
+fn byte_serialized_unchanged(byte: u8) -> bool {
+    matches!(byte, b'*' | b'-' | b'.' | b'0' ... b'9' | b'A' ... b'Z' | b'_' | b'a' ... b'z')
+}
+
+impl<'a> Iterator for ByteSerialize<'a> {
+    type Item = &'a str;
+
+    fn next(&mut self) -> Option<&'a str> {
+        if let Some((&first, tail)) = self.bytes.split_first() {
+            if !byte_serialized_unchanged(first) {
+                self.bytes = tail;
+                return Some(if first == b' ' { "+" } else { percent_encode_byte(first) })
+            }
+            let position = tail.iter().position(|&b| !byte_serialized_unchanged(b));
+            let (unchanged_slice, remaining) = match position {
+                // 1 for first_byte + i unchanged in tail
+                Some(i) => self.bytes.split_at(1 + i),
+                None => (self.bytes, &[][..]),
+            };
+            self.bytes = remaining;
+            Some(unsafe { str::from_utf8_unchecked(unchanged_slice) })
+        } else {
+            None
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        if self.bytes.is_empty() {
+            (0, Some(0))
+        } else {
+            (1, Some(self.bytes.len()))
+        }
+    }
+}
+
+/// The [`application/x-www-form-urlencoded` serializer](
+/// https://url.spec.whatwg.org/#concept-urlencoded-serializer).
+#[derive(Debug)]
+pub struct Serializer<T: Target> {
+    target: Option<T>,
+    start_position: usize,
+    encoding: EncodingOverride,
+}
+
+pub trait Target {
+    fn as_mut_string(&mut self) -> &mut String;
+    fn finish(self) -> Self::Finished;
+    type Finished;
+}
+
+impl Target for String {
+    fn as_mut_string(&mut self) -> &mut String { self }
+    fn finish(self) -> Self { self }
+    type Finished = Self;
+}
+
+impl<'a> Target for &'a mut String {
+    fn as_mut_string(&mut self) -> &mut String { &mut **self }
+    fn finish(self) -> Self { self }
+    type Finished = Self;
+}
+
+// `as_mut_string` string here exposes the internal serialization of an `Url`,
+// which should not be exposed to users.
+// We achieve that by not giving users direct access to `UrlQuery`:
+// * Its fields are private
+//   (and so can not be constructed with struct literal syntax outside of this crate),
+// * It has no constructor
+// * It is only visible (on the type level) to users in the return type of
+//   `Url::query_pairs_mut` which is `Serializer<UrlQuery<'a>>`
+// * `Serializer` keeps its target in a private field
+// * Unlike in other `Target` impls, `UrlQuery::finished` does not return `Self`.
+impl<'a> Target for ::UrlQuery<'a> {
+    fn as_mut_string(&mut self) -> &mut String { &mut self.url.serialization }
+    fn finish(self) -> &'a mut ::Url { unreachable!() }
+    type Finished = &'a mut ::Url;
+}
+
+impl<T: Target> Serializer<T> {
+    /// Create a new `application/x-www-form-urlencoded` serializer for the given target.
+    ///
+    /// If the target is non-empty,
+    /// its content is assumed to already be in `application/x-www-form-urlencoded` syntax.
+    pub fn new(target: T) -> Self {
+        Self::for_suffix(target, 0)
+    }
+
+    /// Create a new `application/x-www-form-urlencoded` serializer
+    /// for a suffix of the given target.
+    ///
+    /// If that suffix is non-empty,
+    /// its content is assumed to already be in `application/x-www-form-urlencoded` syntax.
+    pub fn for_suffix(mut target: T, start_position: usize) -> Self {
+        &target.as_mut_string()[start_position..];  // Panic if out of bounds
+        Serializer {
+            target: Some(target),
+            start_position: start_position,
+            encoding: EncodingOverride::utf8(),
+        }
+    }
+
+    /// Remove any existing name/value pair.
+    ///
+    /// Panics if called after `.finish()`.
+    pub fn clear(&mut self) -> &mut Self {
+        string(&mut self.target).truncate(self.start_position);
+        self
+    }
+
+    /// Set the character encoding to be used for names and values before percent-encoding.
+    #[cfg(feature = "query_encoding")]
+    pub fn encoding_override(&mut self, new: Option<::encoding::EncodingRef>) -> &mut Self {
+        self.encoding = EncodingOverride::from_opt_encoding(new).to_output_encoding();
+        self
+    }
+
+    /// Serialize and append a name/value pair.
+    ///
+    /// Panics if called after `.finish()`.
+    pub fn append_pair(&mut self, name: &str, value: &str) -> &mut Self {
+        append_pair(string(&mut self.target), self.start_position, self.encoding, name, value);
+        self
+    }
+
+    /// Serialize and append a number of name/value pairs.
+    ///
+    /// This simply calls `append_pair` repeatedly.
+    /// This can be more convenient, so the user doesn’t need to introduce a block
+    /// to limit the scope of `Serializer`’s borrow of its string.
+    ///
+    /// Panics if called after `.finish()`.
+    pub fn extend_pairs<I, K, V>(&mut self, iter: I) -> &mut Self
+    where I: IntoIterator, I::Item: Borrow<(K, V)>, K: AsRef<str>, V: AsRef<str> {
+        {
+            let string = string(&mut self.target);
+            for pair in iter {
+                let &(ref k, ref v) = pair.borrow();
+                append_pair(string, self.start_position, self.encoding, k.as_ref(), v.as_ref());
+            }
+        }
+        self
+    }
+
+    /// Add a name/value pair whose name is `_charset_`
+    /// and whose value is the character encoding’s name.
+    /// (See the `encoding_override()` method.)
+    ///
+    /// Panics if called after `.finish()`.
+    #[cfg(feature = "query_encoding")]
+    pub fn append_charset(&mut self) -> &mut Self {
+        {
+            let string = string(&mut self.target);
+            append_separator_if_needed(string, self.start_position);
+            string.push_str("_charset_=");
+            string.push_str(self.encoding.name());
+        }
+        self
+    }
+
+    /// If this serializer was constructed with a string, take and return that string.
+    ///
+    /// ```rust
+    /// use url::form_urlencoded;
+    /// let encoded: String = form_urlencoded::Serializer::new(String::new())
+    ///     .append_pair("foo", "bar & baz")
+    ///     .append_pair("saison", "Été+hiver")
+    ///     .finish();
+    /// assert_eq!(encoded, "foo=bar+%26+baz&saison=%C3%89t%C3%A9%2Bhiver");
+    /// ```
+    ///
+    /// Panics if called more than once.
+    pub fn finish(&mut self) -> T::Finished {
+        self.target.take().expect("url::form_urlencoded::Serializer double finish").finish()
+    }
+}
+
+fn append_separator_if_needed(string: &mut String, start_position: usize) {
+    if string.len() > start_position {
+        string.push('&')
+    }
+}
+
+fn string<T: Target>(target: &mut Option<T>) -> &mut String {
+    target.as_mut().expect("url::form_urlencoded::Serializer finished").as_mut_string()
+}
+
+fn append_pair(string: &mut String, start_position: usize, encoding: EncodingOverride,
+               name: &str, value: &str) {
+    append_separator_if_needed(string, start_position);
+    string.extend(byte_serialize(&encoding.encode(name.into())));
+    string.push('=');
+    string.extend(byte_serialize(&encoding.encode(value.into())));
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/host.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/host.rs
new file mode 100644
index 000000000..7d4fe13df
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/host.rs
@@ -0,0 +1,502 @@
+// Copyright 2013-2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
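+
+// A hedged usage sketch for this module (an added illustration, not part of
+// the vendored sources), assuming the crate-root re-export `url::Host`:
+//
+//     use std::net::{Ipv4Addr, Ipv6Addr};
+//     use url::Host;
+//     // Domains are percent-decoded and IDNA-mapped to lowercase ASCII:
+//     assert_eq!(Host::parse("EXAMPLE.com").unwrap(),
+//                Host::Domain("example.com".to_string()));
+//     // Bracketed input parses as IPv6, dotted-decimal as IPv4:
+//     assert_eq!(Host::parse("[::1]").unwrap(),
+//                Host::Ipv6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)));
+//     assert_eq!(Host::parse("127.0.0.1").unwrap(),
+//                Host::Ipv4(Ipv4Addr::new(127, 0, 0, 1)));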
+ +#[cfg(feature = "heapsize")] use heapsize::HeapSizeOf; +use std::cmp; +use std::fmt::{self, Formatter}; +use std::io; +use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs}; +use std::vec; +use parser::{ParseResult, ParseError}; +use percent_encoding::percent_decode; +use idna; + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub enum HostInternal { + None, + Domain, + Ipv4(Ipv4Addr), + Ipv6(Ipv6Addr), +} + +#[cfg(feature = "heapsize")] +known_heap_size!(0, HostInternal); + +#[cfg(feature="serde")] +impl ::serde::Serialize for HostInternal { + fn serialize(&self, serializer: &mut S) -> Result<(), S::Error> where S: ::serde::Serializer { + // This doesn’t use `derive` because that involves + // large dependencies (that take a long time to build), and + // either Macros 1.1 which are not stable yet or a cumbersome build script. + // + // Implementing `Serializer` correctly for an enum is tricky, + // so let’s use existing enums that already do. + use std::net::IpAddr; + match *self { + HostInternal::None => None, + HostInternal::Domain => Some(None), + HostInternal::Ipv4(addr) => Some(Some(IpAddr::V4(addr))), + HostInternal::Ipv6(addr) => Some(Some(IpAddr::V6(addr))), + }.serialize(serializer) + } +} + +#[cfg(feature="serde")] +impl ::serde::Deserialize for HostInternal { + fn deserialize(deserializer: &mut D) -> Result where D: ::serde::Deserializer { + use std::net::IpAddr; + Ok(match ::serde::Deserialize::deserialize(deserializer)? { + None => HostInternal::None, + Some(None) => HostInternal::Domain, + Some(Some(IpAddr::V4(addr))) => HostInternal::Ipv4(addr), + Some(Some(IpAddr::V6(addr))) => HostInternal::Ipv6(addr), + }) + } +} + +impl From> for HostInternal { + fn from(host: Host) -> HostInternal { + match host { + Host::Domain(_) => HostInternal::Domain, + Host::Ipv4(address) => HostInternal::Ipv4(address), + Host::Ipv6(address) => HostInternal::Ipv6(address), + } + } +} + +/// The host name of an URL. +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub enum Host { + /// A DNS domain name, as '.' dot-separated labels. + /// Non-ASCII labels are encoded in punycode per IDNA. + Domain(S), + + /// An IPv4 address. + /// `Url::host_str` returns the serialization of this address, + /// as four decimal integers separated by `.` dots. + Ipv4(Ipv4Addr), + + /// An IPv6 address. + /// `Url::host_str` returns the serialization of that address between `[` and `]` brackets, + /// in the format per [RFC 5952 *A Recommendation + /// for IPv6 Address Text Representation*](https://tools.ietf.org/html/rfc5952): + /// lowercase hexadecimal with maximal `::` compression. + Ipv6(Ipv6Addr), +} + +#[cfg(feature="serde")] +impl ::serde::Serialize for Host { + fn serialize(&self, serializer: &mut R) -> Result<(), R::Error> where R: ::serde::Serializer { + use std::net::IpAddr; + match *self { + Host::Domain(ref s) => Ok(s), + Host::Ipv4(addr) => Err(IpAddr::V4(addr)), + Host::Ipv6(addr) => Err(IpAddr::V6(addr)), + }.serialize(serializer) + } +} + +#[cfg(feature="serde")] +impl ::serde::Deserialize for Host { + fn deserialize(deserializer: &mut D) -> Result where D: ::serde::Deserializer { + use std::net::IpAddr; + Ok(match ::serde::Deserialize::deserialize(deserializer)? 
{ + Ok(s) => Host::Domain(s), + Err(IpAddr::V4(addr)) => Host::Ipv4(addr), + Err(IpAddr::V6(addr)) => Host::Ipv6(addr), + }) + } +} + +#[cfg(feature = "heapsize")] +impl HeapSizeOf for Host { + fn heap_size_of_children(&self) -> usize { + match *self { + Host::Domain(ref s) => s.heap_size_of_children(), + _ => 0, + } + } +} + +impl<'a> Host<&'a str> { + /// Return a copy of `self` that owns an allocated `String` but does not borrow an `&Url`. + pub fn to_owned(&self) -> Host { + match *self { + Host::Domain(domain) => Host::Domain(domain.to_owned()), + Host::Ipv4(address) => Host::Ipv4(address), + Host::Ipv6(address) => Host::Ipv6(address), + } + } +} + +impl Host { + /// Parse a host: either an IPv6 address in [] square brackets, or a domain. + /// + /// https://url.spec.whatwg.org/#host-parsing + pub fn parse(input: &str) -> Result { + if input.starts_with('[') { + if !input.ends_with(']') { + return Err(ParseError::InvalidIpv6Address) + } + return parse_ipv6addr(&input[1..input.len() - 1]).map(Host::Ipv6) + } + let domain = percent_decode(input.as_bytes()).decode_utf8_lossy(); + let domain = idna::domain_to_ascii(&domain)?; + if domain.find(|c| matches!(c, + '\0' | '\t' | '\n' | '\r' | ' ' | '#' | '%' | '/' | ':' | '?' | '@' | '[' | '\\' | ']' + )).is_some() { + return Err(ParseError::InvalidDomainCharacter) + } + if let Some(address) = parse_ipv4addr(&domain)? { + Ok(Host::Ipv4(address)) + } else { + Ok(Host::Domain(domain.into())) + } + } +} + +impl> fmt::Display for Host { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match *self { + Host::Domain(ref domain) => domain.as_ref().fmt(f), + Host::Ipv4(ref addr) => addr.fmt(f), + Host::Ipv6(ref addr) => { + f.write_str("[")?; + write_ipv6(addr, f)?; + f.write_str("]") + } + } + } +} + +/// This mostly exists because coherence rules don’t allow us to implement +/// `ToSocketAddrs for (Host, u16)`. +#[derive(Clone, Debug)] +pub struct HostAndPort { + pub host: Host, + pub port: u16, +} + +impl<'a> HostAndPort<&'a str> { + /// Return a copy of `self` that owns an allocated `String` but does not borrow an `&Url`. + pub fn to_owned(&self) -> HostAndPort { + HostAndPort { + host: self.host.to_owned(), + port: self.port + } + } +} + +impl> fmt::Display for HostAndPort { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + self.host.fmt(f)?; + f.write_str(":")?; + self.port.fmt(f) + } +} + + +impl> ToSocketAddrs for HostAndPort { + type Iter = SocketAddrs; + + fn to_socket_addrs(&self) -> io::Result { + let port = self.port; + match self.host { + Host::Domain(ref domain) => Ok(SocketAddrs { + // FIXME: use std::net::lookup_host when it’s stable. + state: SocketAddrsState::Domain((domain.as_ref(), port).to_socket_addrs()?) + }), + Host::Ipv4(address) => Ok(SocketAddrs { + state: SocketAddrsState::One(SocketAddr::V4(SocketAddrV4::new(address, port))) + }), + Host::Ipv6(address) => Ok(SocketAddrs { + state: SocketAddrsState::One(SocketAddr::V6(SocketAddrV6::new(address, port, 0, 0))) + }), + } + } +} + +/// Socket addresses for an URL. 
+#[derive(Debug)] +pub struct SocketAddrs { + state: SocketAddrsState +} + +#[derive(Debug)] +enum SocketAddrsState { + Domain(vec::IntoIter), + One(SocketAddr), + Done, +} + +impl Iterator for SocketAddrs { + type Item = SocketAddr; + fn next(&mut self) -> Option { + match self.state { + SocketAddrsState::Domain(ref mut iter) => iter.next(), + SocketAddrsState::One(s) => { + self.state = SocketAddrsState::Done; + Some(s) + } + SocketAddrsState::Done => None + } + } +} + +fn write_ipv6(addr: &Ipv6Addr, f: &mut Formatter) -> fmt::Result { + let segments = addr.segments(); + let (compress_start, compress_end) = longest_zero_sequence(&segments); + let mut i = 0; + while i < 8 { + if i == compress_start { + f.write_str(":")?; + if i == 0 { + f.write_str(":")?; + } + if compress_end < 8 { + i = compress_end; + } else { + break; + } + } + write!(f, "{:x}", segments[i as usize])?; + if i < 7 { + f.write_str(":")?; + } + i += 1; + } + Ok(()) +} + +// https://url.spec.whatwg.org/#concept-ipv6-serializer step 2 and 3 +fn longest_zero_sequence(pieces: &[u16; 8]) -> (isize, isize) { + let mut longest = -1; + let mut longest_length = -1; + let mut start = -1; + macro_rules! finish_sequence( + ($end: expr) => { + if start >= 0 { + let length = $end - start; + if length > longest_length { + longest = start; + longest_length = length; + } + } + }; + ); + for i in 0..8 { + if pieces[i as usize] == 0 { + if start < 0 { + start = i; + } + } else { + finish_sequence!(i); + start = -1; + } + } + finish_sequence!(8); + // https://url.spec.whatwg.org/#concept-ipv6-serializer + // step 3: ignore lone zeroes + if longest_length < 2 { + (-1, -2) + } else { + (longest, longest + longest_length) + } +} + +/// https://url.spec.whatwg.org/#ipv4-number-parser +fn parse_ipv4number(mut input: &str) -> Result { + let mut r = 10; + if input.starts_with("0x") || input.starts_with("0X") { + input = &input[2..]; + r = 16; + } else if input.len() >= 2 && input.starts_with('0') { + input = &input[1..]; + r = 8; + } + if input.is_empty() { + return Ok(0); + } + if input.starts_with('+') { + return Err(()) + } + match u32::from_str_radix(input, r) { + Ok(number) => Ok(number), + Err(_) => Err(()), + } +} + +/// https://url.spec.whatwg.org/#concept-ipv4-parser +fn parse_ipv4addr(input: &str) -> ParseResult> { + if input.is_empty() { + return Ok(None) + } + let mut parts: Vec<&str> = input.split('.').collect(); + if parts.last() == Some(&"") { + parts.pop(); + } + if parts.len() > 4 { + return Ok(None); + } + let mut numbers: Vec = Vec::new(); + for part in parts { + if part == "" { + return Ok(None); + } + if let Ok(n) = parse_ipv4number(part) { + numbers.push(n); + } else { + return Ok(None); + } + } + let mut ipv4 = numbers.pop().expect("a non-empty list of numbers"); + // Equivalent to: ipv4 >= 256 ** (4 − numbers.len()) + if ipv4 > u32::max_value() >> (8 * numbers.len() as u32) { + return Err(ParseError::InvalidIpv4Address); + } + if numbers.iter().any(|x| *x > 255) { + return Err(ParseError::InvalidIpv4Address); + } + for (counter, n) in numbers.iter().enumerate() { + ipv4 += n << (8 * (3 - counter as u32)) + } + Ok(Some(Ipv4Addr::from(ipv4))) +} + +/// https://url.spec.whatwg.org/#concept-ipv6-parser +fn parse_ipv6addr(input: &str) -> ParseResult { + let input = input.as_bytes(); + let len = input.len(); + let mut is_ip_v4 = false; + let mut pieces = [0, 0, 0, 0, 0, 0, 0, 0]; + let mut piece_pointer = 0; + let mut compress_pointer = None; + let mut i = 0; + + if len < 2 { + return Err(ParseError::InvalidIpv6Address) + } + + 
+    if input[0] == b':' {
+        if input[1] != b':' {
+            return Err(ParseError::InvalidIpv6Address)
+        }
+        i = 2;
+        piece_pointer = 1;
+        compress_pointer = Some(1);
+    }
+
+    while i < len {
+        if piece_pointer == 8 {
+            return Err(ParseError::InvalidIpv6Address)
+        }
+        if input[i] == b':' {
+            if compress_pointer.is_some() {
+                return Err(ParseError::InvalidIpv6Address)
+            }
+            i += 1;
+            piece_pointer += 1;
+            compress_pointer = Some(piece_pointer);
+            continue
+        }
+        let start = i;
+        let end = cmp::min(len, start + 4);
+        let mut value = 0u16;
+        while i < end {
+            match (input[i] as char).to_digit(16) {
+                Some(digit) => {
+                    value = value * 0x10 + digit as u16;
+                    i += 1;
+                },
+                None => break
+            }
+        }
+        if i < len {
+            match input[i] {
+                b'.' => {
+                    if i == start {
+                        return Err(ParseError::InvalidIpv6Address)
+                    }
+                    i = start;
+                    is_ip_v4 = true;
+                },
+                b':' => {
+                    i += 1;
+                    if i == len {
+                        return Err(ParseError::InvalidIpv6Address)
+                    }
+                },
+                _ => return Err(ParseError::InvalidIpv6Address)
+            }
+        }
+        if is_ip_v4 {
+            break
+        }
+        pieces[piece_pointer] = value;
+        piece_pointer += 1;
+    }
+
+    if is_ip_v4 {
+        if piece_pointer > 6 {
+            return Err(ParseError::InvalidIpv6Address)
+        }
+        let mut dots_seen = 0;
+        while i < len {
+            let mut value = None;
+            while i < len {
+                let digit = match input[i] {
+                    c @ b'0' ... b'9' => c - b'0',
+                    _ => break
+                };
+                match value {
+                    None => value = Some(digit as u16),
+                    Some(0) => return Err(ParseError::InvalidIpv6Address), // No leading zero
+                    Some(ref mut v) => {
+                        *v = *v * 10 + digit as u16;
+                        if *v > 255 {
+                            return Err(ParseError::InvalidIpv6Address)
+                        }
+                    }
+                }
+                i += 1;
+            }
+            if dots_seen < 3 && !(i < len && input[i] == b'.') {
+                return Err(ParseError::InvalidIpv6Address)
+            }
+            pieces[piece_pointer] = if let Some(v) = value {
+                pieces[piece_pointer] * 0x100 + v
+            } else {
+                return Err(ParseError::InvalidIpv6Address)
+            };
+            if dots_seen == 1 || dots_seen == 3 {
+                piece_pointer += 1;
+            }
+            i += 1;
+            if dots_seen == 3 && i < len {
+                return Err(ParseError::InvalidIpv6Address)
+            }
+            dots_seen += 1;
+        }
+    }
+    match compress_pointer {
+        Some(compress_pointer) => {
+            let mut swaps = piece_pointer - compress_pointer;
+            piece_pointer = 7;
+            while swaps > 0 {
+                pieces.swap(piece_pointer, compress_pointer + swaps - 1);
+                swaps -= 1;
+                piece_pointer -= 1;
+            }
+        }
+        _ => if piece_pointer != 8 {
+            return Err(ParseError::InvalidIpv6Address)
+        }
+    }
+    Ok(Ipv6Addr::new(pieces[0], pieces[1], pieces[2], pieces[3],
+                     pieces[4], pieces[5], pieces[6], pieces[7]))
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/lib.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/lib.rs
new file mode 100644
index 000000000..2907da822
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/lib.rs
@@ -0,0 +1,2403 @@
+// Copyright 2013-2015 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/*!
+
+rust-url is an implementation of the [URL Standard](http://url.spec.whatwg.org/)
+for the [Rust](http://rust-lang.org/) programming language.
+
+
+# URL parsing and data structures
+
+First, URL parsing may fail for various reasons and therefore returns a `Result`.
+
+```
+use url::{Url, ParseError};
+
+assert!(Url::parse("http://[:::1]") == Err(ParseError::InvalidIpv6Address))
+```
+
+Let’s parse a valid URL and look at its components.
+
+```
+use url::{Url, Host};
+# use url::ParseError;
+# fn run() -> Result<(), ParseError> {
+let issue_list_url = Url::parse(
+    "https://github.com/rust-lang/rust/issues?labels=E-easy&state=open"
+)?;
+
+
+assert!(issue_list_url.scheme() == "https");
+assert!(issue_list_url.username() == "");
+assert!(issue_list_url.password() == None);
+assert!(issue_list_url.host_str() == Some("github.com"));
+assert!(issue_list_url.host() == Some(Host::Domain("github.com")));
+assert!(issue_list_url.port() == None);
+assert!(issue_list_url.path() == "/rust-lang/rust/issues");
+assert!(issue_list_url.path_segments().map(|c| c.collect::<Vec<_>>()) ==
+        Some(vec!["rust-lang", "rust", "issues"]));
+assert!(issue_list_url.query() == Some("labels=E-easy&state=open"));
+assert!(issue_list_url.fragment() == None);
+assert!(!issue_list_url.cannot_be_a_base());
+# Ok(())
+# }
+# run().unwrap();
+```
+
+Some URLs are said to be *cannot-be-a-base*:
+they don’t have a username, password, host, or port,
+and their "path" is an arbitrary string rather than slash-separated segments:
+
+```
+use url::Url;
+# use url::ParseError;
+
+# fn run() -> Result<(), ParseError> {
+let data_url = Url::parse("data:text/plain,Hello?World#")?;
+
+assert!(data_url.cannot_be_a_base());
+assert!(data_url.scheme() == "data");
+assert!(data_url.path() == "text/plain,Hello");
+assert!(data_url.path_segments().is_none());
+assert!(data_url.query() == Some("World"));
+assert!(data_url.fragment() == Some(""));
+# Ok(())
+# }
+# run().unwrap();
+```
+
+
+# Base URL
+
+Many contexts allow URL *references* that can be relative to a *base URL*:
+
+```html
+<link rel="stylesheet" href="../main.css">
+```
+
+Since parsed URLs are absolute, giving a base is required for parsing relative URLs:
+
+```
+use url::{Url, ParseError};
+
+assert!(Url::parse("../main.css") == Err(ParseError::RelativeUrlWithoutBase))
+```
+
+Use the `join` method on an `Url` to use it as a base URL:
+
+```
+use url::Url;
+# use url::ParseError;
+
+# fn run() -> Result<(), ParseError> {
+let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html")?;
+let css_url = this_document.join("../main.css")?;
+assert_eq!(css_url.as_str(), "http://servo.github.io/rust-url/main.css");
+# Ok(())
+# }
+# run().unwrap();
+```
+*/
+
+#![doc(html_root_url = "https://docs.rs/url/1.5.1")]
+
+#[cfg(feature="rustc-serialize")] extern crate rustc_serialize;
+#[macro_use] extern crate matches;
+#[cfg(feature="serde")] extern crate serde;
+#[cfg(feature="heapsize")] #[macro_use] extern crate heapsize;
+
+pub extern crate idna;
+pub extern crate percent_encoding;
+
+use encoding::EncodingOverride;
+#[cfg(feature = "heapsize")] use heapsize::HeapSizeOf;
+use host::HostInternal;
+use parser::{Parser, Context, SchemeType, to_u32};
+use percent_encoding::{PATH_SEGMENT_ENCODE_SET, USERINFO_ENCODE_SET,
+                       percent_encode, percent_decode, utf8_percent_encode};
+use std::borrow::Borrow;
+use std::cmp;
+#[cfg(feature = "serde")] use std::error::Error;
+use std::fmt::{self, Write, Debug, Formatter};
+use std::hash;
+use std::io;
+use std::mem;
+use std::net::{ToSocketAddrs, IpAddr};
+use std::ops::{Range, RangeFrom, RangeTo};
+use std::path::{Path, PathBuf};
+use std::str;
+
+pub use origin::{Origin, OpaqueOrigin};
+pub use host::{Host, HostAndPort, SocketAddrs};
+pub use path_segments::PathSegmentsMut;
+pub use parser::ParseError;
+pub use slicing::Position;
+
+mod encoding;
+mod host;
+mod origin;
+mod path_segments;
+mod parser;
+mod slicing;
+
+pub mod form_urlencoded;
+#[doc(hidden)] pub mod quirks;
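+
+// A worked example of the index fields defined just below (offsets computed
+// by hand from the field comments, for illustration only). For the
+// serialization "https://user:pass@example.com:8080/a/b?q#f":
+//
+//     scheme_end     = 5           // before ':'
+//     username_end   = 12          // before the ':' of the password
+//     host_start     = 18
+//     host_end       = 29
+//     path_start     = 34          // the '/' of "/a/b"
+//     query_start    = Some(38)    // before '?'
+//     fragment_start = Some(40)    // before '#'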
+
+/// A parsed URL record.
+#[derive(Clone)]
+pub struct Url {
+    /// Syntax in pseudo-BNF:
+    ///
+    ///   url = scheme ":" [ hierarchical | non-hierarchical ] [ "?" query ]? [ "#" fragment ]?
+    ///   non-hierarchical = non-hierarchical-path
+    ///   non-hierarchical-path = /* Does not start with "/" */
+    ///   hierarchical = authority? hierarchical-path
+    ///   authority = "//" userinfo? host [ ":" port ]?
+    ///   userinfo = username [ ":" password ]? "@"
+    ///   hierarchical-path = [ "/" path-segment ]+
+    serialization: String,
+
+    // Components
+    scheme_end: u32,  // Before ':'
+    username_end: u32,  // Before ':' (if a password is given) or '@' (if not)
+    host_start: u32,
+    host_end: u32,
+    host: HostInternal,
+    port: Option<u16>,
+    path_start: u32,  // Before initial '/', if any
+    query_start: Option<u32>,  // Before '?', unlike Position::QueryStart
+    fragment_start: Option<u32>,  // Before '#', unlike Position::FragmentStart
+}
+
+#[cfg(feature = "heapsize")]
+impl HeapSizeOf for Url {
+    fn heap_size_of_children(&self) -> usize {
+        self.serialization.heap_size_of_children()
+    }
+}
+
+/// Full configuration for the URL parser.
+#[derive(Copy, Clone)]
+pub struct ParseOptions<'a> {
+    base_url: Option<&'a Url>,
+    encoding_override: encoding::EncodingOverride,
+    log_syntax_violation: Option<&'a Fn(&'static str)>,
+}
+
+impl<'a> ParseOptions<'a> {
+    /// Change the base URL
+    pub fn base_url(mut self, new: Option<&'a Url>) -> Self {
+        self.base_url = new;
+        self
+    }
+
+    /// Override the character encoding of query strings.
+    /// This is a legacy concept only relevant for HTML.
+    ///
+    /// `EncodingRef` is defined in [rust-encoding](https://github.com/lifthrasiir/rust-encoding).
+    ///
+    /// This method is only available if the `query_encoding`
+    /// [feature](http://doc.crates.io/manifest.html#the-features-section]) is enabled.
+    #[cfg(feature = "query_encoding")]
+    pub fn encoding_override(mut self, new: Option<EncodingRef>) -> Self {
+        self.encoding_override = EncodingOverride::from_opt_encoding(new).to_output_encoding();
+        self
+    }
+
+    /// Call the provided function or closure on non-fatal parse errors.
+    pub fn log_syntax_violation(mut self, new: Option<&'a Fn(&'static str)>) -> Self {
+        self.log_syntax_violation = new;
+        self
+    }
+
+    /// Parse an URL string with the configuration so far.
+    pub fn parse(self, input: &str) -> Result<Url, ::ParseError> {
+        Parser {
+            serialization: String::with_capacity(input.len()),
+            base_url: self.base_url,
+            query_encoding_override: self.encoding_override,
+            log_syntax_violation: self.log_syntax_violation,
+            context: Context::UrlParser,
+        }.parse_url(input)
+    }
+}
+
+impl<'a> Debug for ParseOptions<'a> {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(f, "ParseOptions {{ base_url: {:?}, encoding_override: {:?}, log_syntax_violation: ", self.base_url, self.encoding_override)?;
+        match self.log_syntax_violation {
+            Some(_) => write!(f, "Some(Fn(&'static str)) }}"),
+            None => write!(f, "None }}")
+        }
+    }
+}
+
+impl Url {
+    /// Parse an absolute URL from a string.
+    ///
+    /// # Examples
+    ///
+    /// ```rust
+    /// use url::Url;
+    /// # use url::ParseError;
+    ///
+    /// # fn run() -> Result<(), ParseError> {
+    /// let url = Url::parse("https://example.net")?;
+    /// # Ok(())
+    /// # }
+    /// # run().unwrap();
+    /// ```
+    #[inline]
+    pub fn parse(input: &str) -> Result<Url, ::ParseError> {
+        Url::options().parse(input)
+    }
+
+    /// Parse an absolute URL from a string and add params to its query string.
+    ///
+    /// Existing params are not removed.
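+    ///
+    /// In other words, this behaves like `Url::parse(input)` followed by
+    /// `url.query_pairs_mut().extend_pairs(iter)` on the parsed URL.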
+ /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse_with_params("https://example.net?dont=clobberme", + /// &[("lang", "rust"), ("browser", "servo")])?; + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + #[inline] + pub fn parse_with_params(input: &str, iter: I) -> Result + where I: IntoIterator, + I::Item: Borrow<(K, V)>, + K: AsRef, + V: AsRef + { + let mut url = Url::options().parse(input); + + if let Ok(ref mut url) = url { + url.query_pairs_mut().extend_pairs(iter); + } + + url + } + + /// Parse a string as an URL, with this URL as the base URL. + /// + /// Note: a trailing slash is significant. + /// Without it, the last path component is considered to be a “file” name + /// to be removed to get at the “directory” that is used as the base: + /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let base = Url::parse("https://example.net/a/b.html")?; + /// let url = base.join("c.png")?; + /// assert_eq!(url.as_str(), "https://example.net/a/c.png"); // Not /a/b.html/c.png + /// + /// let base = Url::parse("https://example.net/a/b/")?; + /// let url = base.join("c.png")?; + /// assert_eq!(url.as_str(), "https://example.net/a/b/c.png"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + #[inline] + pub fn join(&self, input: &str) -> Result { + Url::options().base_url(Some(self)).parse(input) + } + + /// Return a default `ParseOptions` that can fully configure the URL parser. + /// + /// # Examples + /// + /// Get default `ParseOptions`, then change base url + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// # fn run() -> Result<(), ParseError> { + /// let options = Url::options(); + /// let api = Url::parse("https://api.example.com")?; + /// let base_url = options.base_url(Some(&api)); + /// let version_url = base_url.parse("version.json")?; + /// assert_eq!(version_url.as_str(), "https://api.example.com/version.json"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn options<'a>() -> ParseOptions<'a> { + ParseOptions { + base_url: None, + encoding_override: EncodingOverride::utf8(), + log_syntax_violation: None, + } + } + + /// Return the serialization of this URL. + /// + /// This is fast since that serialization is already stored in the `Url` struct. + /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url_str = "https://example.net/"; + /// let url = Url::parse(url_str)?; + /// assert_eq!(url.as_str(), url_str); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + #[inline] + pub fn as_str(&self) -> &str { + &self.serialization + } + + /// Return the serialization of this URL. + /// + /// This consumes the `Url` and takes ownership of the `String` stored in it. + /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url_str = "https://example.net/"; + /// let url = Url::parse(url_str)?; + /// assert_eq!(url.into_string(), url_str); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + #[inline] + pub fn into_string(self) -> String { + self.serialization + } + + /// For internal testing, not part of the public API. + /// + /// Methods of the `Url` struct assume a number of invariants. 
+ /// This checks each of these invariants and panic if one is not met. + /// This is for testing rust-url itself. + #[doc(hidden)] + pub fn check_invariants(&self) -> Result<(), String> { + macro_rules! assert { + ($x: expr) => { + if !$x { + return Err(format!("!( {} ) for URL {:?}", + stringify!($x), self.serialization)) + } + } + } + + macro_rules! assert_eq { + ($a: expr, $b: expr) => { + { + let a = $a; + let b = $b; + if a != b { + return Err(format!("{:?} != {:?} ({} != {}) for URL {:?}", + a, b, stringify!($a), stringify!($b), + self.serialization)) + } + } + } + } + + assert!(self.scheme_end >= 1); + assert!(matches!(self.byte_at(0), b'a'...b'z' | b'A'...b'Z')); + assert!(self.slice(1..self.scheme_end).chars() + .all(|c| matches!(c, 'a'...'z' | 'A'...'Z' | '0'...'9' | '+' | '-' | '.'))); + assert_eq!(self.byte_at(self.scheme_end), b':'); + + if self.slice(self.scheme_end + 1 ..).starts_with("//") { + // URL with authority + match self.byte_at(self.username_end) { + b':' => { + assert!(self.host_start >= self.username_end + 2); + assert_eq!(self.byte_at(self.host_start - 1), b'@'); + } + b'@' => assert!(self.host_start == self.username_end + 1), + _ => assert_eq!(self.username_end, self.scheme_end + 3), + } + assert!(self.host_start >= self.username_end); + assert!(self.host_end >= self.host_start); + let host_str = self.slice(self.host_start..self.host_end); + match self.host { + HostInternal::None => assert_eq!(host_str, ""), + HostInternal::Ipv4(address) => assert_eq!(host_str, address.to_string()), + HostInternal::Ipv6(address) => { + let h: Host = Host::Ipv6(address); + assert_eq!(host_str, h.to_string()) + } + HostInternal::Domain => { + if SchemeType::from(self.scheme()).is_special() { + assert!(!host_str.is_empty()) + } + } + } + if self.path_start == self.host_end { + assert_eq!(self.port, None); + } else { + assert_eq!(self.byte_at(self.host_end), b':'); + let port_str = self.slice(self.host_end + 1..self.path_start); + assert_eq!(self.port, Some(port_str.parse::().expect("Couldn't parse port?"))); + } + assert_eq!(self.byte_at(self.path_start), b'/'); + } else { + // Anarchist URL (no authority) + assert_eq!(self.username_end, self.scheme_end + 1); + assert_eq!(self.host_start, self.scheme_end + 1); + assert_eq!(self.host_end, self.scheme_end + 1); + assert_eq!(self.host, HostInternal::None); + assert_eq!(self.port, None); + assert_eq!(self.path_start, self.scheme_end + 1); + } + if let Some(start) = self.query_start { + assert!(start > self.path_start); + assert_eq!(self.byte_at(start), b'?'); + } + if let Some(start) = self.fragment_start { + assert!(start > self.path_start); + assert_eq!(self.byte_at(start), b'#'); + } + if let (Some(query_start), Some(fragment_start)) = (self.query_start, self.fragment_start) { + assert!(fragment_start > query_start); + } + + let other = Url::parse(self.as_str()).expect("Failed to parse myself?"); + assert_eq!(&self.serialization, &other.serialization); + assert_eq!(self.scheme_end, other.scheme_end); + assert_eq!(self.username_end, other.username_end); + assert_eq!(self.host_start, other.host_start); + assert_eq!(self.host_end, other.host_end); + assert!(self.host == other.host || + // XXX No host round-trips to empty host. 
+ // See https://github.com/whatwg/url/issues/79 + (self.host_str(), other.host_str()) == (None, Some(""))); + assert_eq!(self.port, other.port); + assert_eq!(self.path_start, other.path_start); + assert_eq!(self.query_start, other.query_start); + assert_eq!(self.fragment_start, other.fragment_start); + Ok(()) + } + + /// Return the origin of this URL (https://url.spec.whatwg.org/#origin) + /// + /// Note: this returns an opaque origin for `file:` URLs, which causes + /// `url.origin() != url.origin()`. + /// + /// # Examples + /// + /// URL with `ftp` scheme: + /// + /// ```rust + /// use url::{Host, Origin, Url}; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("ftp://example.com/foo")?; + /// assert_eq!(url.origin(), + /// Origin::Tuple("ftp".into(), + /// Host::Domain("example.com".into()), + /// 21)); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// URL with `blob` scheme: + /// + /// ```rust + /// use url::{Host, Origin, Url}; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("blob:https://example.com/foo")?; + /// assert_eq!(url.origin(), + /// Origin::Tuple("https".into(), + /// Host::Domain("example.com".into()), + /// 443)); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// URL with `file` scheme: + /// + /// ```rust + /// use url::{Host, Origin, Url}; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("file:///tmp/foo")?; + /// assert!(!url.origin().is_tuple()); + /// + /// let other_url = Url::parse("file:///tmp/foo")?; + /// assert!(url.origin() != other_url.origin()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// URL with other scheme: + /// + /// ```rust + /// use url::{Host, Origin, Url}; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("foo:bar")?; + /// assert!(!url.origin().is_tuple()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + #[inline] + pub fn origin(&self) -> Origin { + origin::url_origin(self) + } + + /// Return the scheme of this URL, lower-cased, as an ASCII string without the ':' delimiter. + /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("file:///tmp/foo")?; + /// assert_eq!(url.scheme(), "file"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + #[inline] + pub fn scheme(&self) -> &str { + self.slice(..self.scheme_end) + } + + /// Return whether the URL has an 'authority', + /// which can contain a username, password, host, and port number. + /// + /// URLs that do *not* are either path-only like `unix:/run/foo.socket` + /// or cannot-be-a-base like `data:text/plain,Stuff`. 
+ /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("ftp://rms@example.com")?; + /// assert!(url.has_authority()); + /// + /// let url = Url::parse("unix:/run/foo.socket")?; + /// assert!(!url.has_authority()); + /// + /// let url = Url::parse("data:text/plain,Stuff")?; + /// assert!(!url.has_authority()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + #[inline] + pub fn has_authority(&self) -> bool { + debug_assert!(self.byte_at(self.scheme_end) == b':'); + self.slice(self.scheme_end..).starts_with("://") + } + + /// Return whether this URL is a cannot-be-a-base URL, + /// meaning that parsing a relative URL string with this URL as the base will return an error. + /// + /// This is the case if the scheme and `:` delimiter are not followed by a `/` slash, + /// as is typically the case of `data:` and `mailto:` URLs. + /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("ftp://rms@example.com")?; + /// assert!(!url.cannot_be_a_base()); + /// + /// let url = Url::parse("unix:/run/foo.socket")?; + /// assert!(!url.cannot_be_a_base()); + /// + /// let url = Url::parse("data:text/plain,Stuff")?; + /// assert!(url.cannot_be_a_base()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + #[inline] + pub fn cannot_be_a_base(&self) -> bool { + !self.slice(self.path_start..).starts_with('/') + } + + /// Return the username for this URL (typically the empty string) + /// as a percent-encoded ASCII string. + /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("ftp://rms@example.com")?; + /// assert_eq!(url.username(), "rms"); + /// + /// let url = Url::parse("ftp://:secret123@example.com")?; + /// assert_eq!(url.username(), ""); + /// + /// let url = Url::parse("https://example.com")?; + /// assert_eq!(url.username(), ""); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn username(&self) -> &str { + if self.has_authority() { + self.slice(self.scheme_end + ("://".len() as u32)..self.username_end) + } else { + "" + } + } + + /// Return the password for this URL, if any, as a percent-encoded ASCII string. + /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("ftp://rms:secret123@example.com")?; + /// assert_eq!(url.password(), Some("secret123")); + /// + /// let url = Url::parse("ftp://:secret123@example.com")?; + /// assert_eq!(url.password(), Some("secret123")); + /// + /// let url = Url::parse("ftp://rms@example.com")?; + /// assert_eq!(url.password(), None); + /// + /// let url = Url::parse("https://example.com")?; + /// assert_eq!(url.password(), None); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn password(&self) -> Option<&str> { + // This ':' is not the one marking a port number since a host can not be empty. + // (Except for file: URLs, which do not have port numbers.) + if self.has_authority() && self.byte_at(self.username_end) == b':' { + debug_assert!(self.byte_at(self.host_start - 1) == b'@'); + Some(self.slice(self.username_end + 1..self.host_start - 1)) + } else { + None + } + } + + /// Equivalent to `url.host().is_some()`. 
+ /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("ftp://rms@example.com")?; + /// assert!(url.has_host()); + /// + /// let url = Url::parse("unix:/run/foo.socket")?; + /// assert!(!url.has_host()); + /// + /// let url = Url::parse("data:text/plain,Stuff")?; + /// assert!(!url.has_host()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn has_host(&self) -> bool { + !matches!(self.host, HostInternal::None) + } + + /// Return the string representation of the host (domain or IP address) for this URL, if any. + /// + /// Non-ASCII domains are punycode-encoded per IDNA. + /// IPv6 addresses are given between `[` and `]` brackets. + /// + /// Cannot-be-a-base URLs (typical of `data:` and `mailto:`) and some `file:` URLs + /// don’t have a host. + /// + /// See also the `host` method. + /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("https://127.0.0.1/index.html")?; + /// assert_eq!(url.host_str(), Some("127.0.0.1")); + /// + /// let url = Url::parse("ftp://rms@example.com")?; + /// assert_eq!(url.host_str(), Some("example.com")); + /// + /// let url = Url::parse("unix:/run/foo.socket")?; + /// assert_eq!(url.host_str(), None); + /// + /// let url = Url::parse("data:text/plain,Stuff")?; + /// assert_eq!(url.host_str(), None); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn host_str(&self) -> Option<&str> { + if self.has_host() { + Some(self.slice(self.host_start..self.host_end)) + } else { + None + } + } + + /// Return the parsed representation of the host for this URL. + /// Non-ASCII domain labels are punycode-encoded per IDNA. + /// + /// Cannot-be-a-base URLs (typical of `data:` and `mailto:`) and some `file:` URLs + /// don’t have a host. + /// + /// See also the `host_str` method. + /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("https://127.0.0.1/index.html")?; + /// assert!(url.host().is_some()); + /// + /// let url = Url::parse("ftp://rms@example.com")?; + /// assert!(url.host().is_some()); + /// + /// let url = Url::parse("unix:/run/foo.socket")?; + /// assert!(url.host().is_none()); + /// + /// let url = Url::parse("data:text/plain,Stuff")?; + /// assert!(url.host().is_none()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn host(&self) -> Option> { + match self.host { + HostInternal::None => None, + HostInternal::Domain => Some(Host::Domain(self.slice(self.host_start..self.host_end))), + HostInternal::Ipv4(address) => Some(Host::Ipv4(address)), + HostInternal::Ipv6(address) => Some(Host::Ipv6(address)), + } + } + + /// If this URL has a host and it is a domain name (not an IP address), return it. 
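+    ///
+    /// Unlike `host_str`, this returns `None` when the host is an
+    /// IP address rather than a domain name.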
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use url::Url;
+    /// # use url::ParseError;
+    ///
+    /// # fn run() -> Result<(), ParseError> {
+    /// let url = Url::parse("https://127.0.0.1/")?;
+    /// assert_eq!(url.domain(), None);
+    ///
+    /// let url = Url::parse("mailto:rms@example.net")?;
+    /// assert_eq!(url.domain(), None);
+    ///
+    /// let url = Url::parse("https://example.com/")?;
+    /// assert_eq!(url.domain(), Some("example.com"));
+    /// # Ok(())
+    /// # }
+    /// # run().unwrap();
+    /// ```
+    pub fn domain(&self) -> Option<&str> {
+        match self.host {
+            HostInternal::Domain => Some(self.slice(self.host_start..self.host_end)),
+            _ => None,
+        }
+    }
+
+    /// Return the port number for this URL, if any.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use url::Url;
+    /// # use url::ParseError;
+    ///
+    /// # fn run() -> Result<(), ParseError> {
+    /// let url = Url::parse("https://example.com")?;
+    /// assert_eq!(url.port(), None);
+    ///
+    /// let url = Url::parse("ssh://example.com:22")?;
+    /// assert_eq!(url.port(), Some(22));
+    /// # Ok(())
+    /// # }
+    /// # run().unwrap();
+    /// ```
+    #[inline]
+    pub fn port(&self) -> Option<u16> {
+        self.port
+    }
+
+    /// Return the port number for this URL, or the default port number if it is known.
+    ///
+    /// This method only knows the default port number
+    /// of the `http`, `https`, `ws`, `wss`, `ftp`, and `gopher` schemes.
+    ///
+    /// For URLs in these schemes, this method always returns `Some(_)`.
+    /// For other schemes, it is the same as `Url::port()`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use url::Url;
+    /// # use url::ParseError;
+    ///
+    /// # fn run() -> Result<(), ParseError> {
+    /// let url = Url::parse("foo://example.com")?;
+    /// assert_eq!(url.port_or_known_default(), None);
+    ///
+    /// let url = Url::parse("foo://example.com:1456")?;
+    /// assert_eq!(url.port_or_known_default(), Some(1456));
+    ///
+    /// let url = Url::parse("https://example.com")?;
+    /// assert_eq!(url.port_or_known_default(), Some(443));
+    /// # Ok(())
+    /// # }
+    /// # run().unwrap();
+    /// ```
+    #[inline]
+    pub fn port_or_known_default(&self) -> Option<u16> {
+        self.port.or_else(|| parser::default_port(self.scheme()))
+    }
+
+    /// If the URL has a host, return something that implements `ToSocketAddrs`.
+    ///
+    /// If the URL has no port number and the scheme’s default port number is not known
+    /// (see `Url::port_or_known_default`),
+    /// the closure is called to obtain a port number.
+    /// Typically, this closure can match on the result of `Url::scheme`
+    /// to have per-scheme default port numbers,
+    /// and panic for schemes it’s not prepared to handle.
+    /// For example:
+    ///
+    /// ```rust
+    /// # use url::Url;
+    /// # use std::net::TcpStream;
+    /// # use std::io;
+    /// fn connect(url: &Url) -> io::Result<TcpStream> {
+    ///     TcpStream::connect(url.with_default_port(default_port)?)
+    /// }
+    ///
+    /// fn default_port(url: &Url) -> Result<u16, ()> {
+    ///     match url.scheme() {
+    ///         "git" => Ok(9418),
+    ///         "git+ssh" => Ok(22),
+    ///         "git+https" => Ok(443),
+    ///         "git+http" => Ok(80),
+    ///         _ => Err(()),
+    ///     }
+    /// }
+    /// ```
+    pub fn with_default_port<F>(&self, f: F) -> io::Result<HostAndPort<&str>>
+    where F: FnOnce(&Url) -> Result<u16, ()> {
+        Ok(HostAndPort {
+            host: self.host()
+                .ok_or(())
+                .or_else(|()| io_error("URL has no host"))?,
+            port: self.port_or_known_default()
+                .ok_or(())
+                .or_else(|()| f(self))
+                .or_else(|()| io_error("URL has no port number"))?
+        })
+    }
+
+    /// Return the path for this URL, as a percent-encoded ASCII string.
+ /// For cannot-be-a-base URLs, this is an arbitrary string that doesn’t start with '/'. + /// For other URLs, this starts with a '/' slash + /// and continues with slash-separated path segments. + /// + /// # Examples + /// + /// ```rust + /// use url::{Url, ParseError}; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("https://example.com/api/versions?page=2")?; + /// assert_eq!(url.path(), "/api/versions"); + /// + /// let url = Url::parse("https://example.com")?; + /// assert_eq!(url.path(), "/"); + /// + /// let url = Url::parse("https://example.com/countries/việt nam")?; + /// assert_eq!(url.path(), "/countries/vi%E1%BB%87t%20nam"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn path(&self) -> &str { + match (self.query_start, self.fragment_start) { + (None, None) => self.slice(self.path_start..), + (Some(next_component_start), _) | + (None, Some(next_component_start)) => { + self.slice(self.path_start..next_component_start) + } + } + } + + /// Unless this URL is cannot-be-a-base, + /// return an iterator of '/' slash-separated path segments, + /// each as a percent-encoded ASCII string. + /// + /// Return `None` for cannot-be-a-base URLs. + /// + /// When `Some` is returned, the iterator always contains at least one string + /// (which may be empty). + /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use std::error::Error; + /// + /// # fn run() -> Result<(), Box> { + /// let url = Url::parse("https://example.com/foo/bar")?; + /// let mut path_segments = url.path_segments().ok_or_else(|| "cannot be base")?; + /// assert_eq!(path_segments.next(), Some("foo")); + /// assert_eq!(path_segments.next(), Some("bar")); + /// assert_eq!(path_segments.next(), None); + /// + /// let url = Url::parse("https://example.com")?; + /// let mut path_segments = url.path_segments().ok_or_else(|| "cannot be base")?; + /// assert_eq!(path_segments.next(), Some("")); + /// assert_eq!(path_segments.next(), None); + /// + /// let url = Url::parse("data:text/plain,HelloWorld")?; + /// assert!(url.path_segments().is_none()); + /// + /// let url = Url::parse("https://example.com/countries/việt nam")?; + /// let mut path_segments = url.path_segments().ok_or_else(|| "cannot be base")?; + /// assert_eq!(path_segments.next(), Some("countries")); + /// assert_eq!(path_segments.next(), Some("vi%E1%BB%87t%20nam")); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn path_segments(&self) -> Option> { + let path = self.path(); + if path.starts_with('/') { + Some(path[1..].split('/')) + } else { + None + } + } + + /// Return this URL’s query string, if any, as a percent-encoded ASCII string. 
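+    ///
+    /// The leading `?` is not included in the returned value.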
+ /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// fn run() -> Result<(), ParseError> { + /// let url = Url::parse("https://example.com/products?page=2")?; + /// let query = url.query(); + /// assert_eq!(query, Some("page=2")); + /// + /// let url = Url::parse("https://example.com/products")?; + /// let query = url.query(); + /// assert!(query.is_none()); + /// + /// let url = Url::parse("https://example.com/?country=español")?; + /// let query = url.query(); + /// assert_eq!(query, Some("country=espa%C3%B1ol")); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn query(&self) -> Option<&str> { + match (self.query_start, self.fragment_start) { + (None, _) => None, + (Some(query_start), None) => { + debug_assert!(self.byte_at(query_start) == b'?'); + Some(self.slice(query_start + 1..)) + } + (Some(query_start), Some(fragment_start)) => { + debug_assert!(self.byte_at(query_start) == b'?'); + Some(self.slice(query_start + 1..fragment_start)) + } + } + } + + /// Parse the URL’s query string, if any, as `application/x-www-form-urlencoded` + /// and return an iterator of (key, value) pairs. + /// + /// # Examples + /// + /// ```rust + /// use std::borrow::Cow; + /// + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("https://example.com/products?page=2&sort=desc")?; + /// let mut pairs = url.query_pairs(); + /// + /// assert_eq!(pairs.count(), 2); + /// + /// assert_eq!(pairs.next(), Some((Cow::Borrowed("page"), Cow::Borrowed("2")))); + /// assert_eq!(pairs.next(), Some((Cow::Borrowed("sort"), Cow::Borrowed("desc")))); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// + + #[inline] + pub fn query_pairs(&self) -> form_urlencoded::Parse { + form_urlencoded::parse(self.query().unwrap_or("").as_bytes()) + } + + /// Return this URL’s fragment identifier, if any. + /// + /// A fragment is the part of the URL after the `#` symbol. + /// The fragment is optional and, if present, contains a fragment identifier + /// that identifies a secondary resource, such as a section heading + /// of a document. + /// + /// In HTML, the fragment identifier is usually the id attribute of a an element + /// that is scrolled to on load. Browsers typically will not send the fragment portion + /// of a URL to the server. + /// + /// **Note:** the parser did *not* percent-encode this component, + /// but the input may have been percent-encoded already. + /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let url = Url::parse("https://example.com/data.csv#row=4")?; + /// + /// assert_eq!(url.fragment(), Some("row=4")); + /// + /// let url = Url::parse("https://example.com/data.csv#cell=4,1-6,2")?; + /// + /// assert_eq!(url.fragment(), Some("cell=4,1-6,2")); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn fragment(&self) -> Option<&str> { + self.fragment_start.map(|start| { + debug_assert!(self.byte_at(start) == b'#'); + self.slice(start + 1..) + }) + } + + fn mutate R, R>(&mut self, f: F) -> R { + let mut parser = Parser::for_setter(mem::replace(&mut self.serialization, String::new())); + let result = f(&mut parser); + self.serialization = parser.serialization; + result + } + + /// Change this URL’s fragment identifier. 
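+    ///
+    /// Passing `None` removes the `#` delimiter and everything after it;
+    /// passing `Some(_)` re-parses the given string as a fragment.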
+ /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("https://example.com/data.csv")?; + /// assert_eq!(url.as_str(), "https://example.com/data.csv"); + + /// url.set_fragment(Some("cell=4,1-6,2")); + /// assert_eq!(url.as_str(), "https://example.com/data.csv#cell=4,1-6,2"); + /// assert_eq!(url.fragment(), Some("cell=4,1-6,2")); + /// + /// url.set_fragment(None); + /// assert_eq!(url.as_str(), "https://example.com/data.csv"); + /// assert!(url.fragment().is_none()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn set_fragment(&mut self, fragment: Option<&str>) { + // Remove any previous fragment + if let Some(start) = self.fragment_start { + debug_assert!(self.byte_at(start) == b'#'); + self.serialization.truncate(start as usize); + } + // Write the new one + if let Some(input) = fragment { + self.fragment_start = Some(to_u32(self.serialization.len()).unwrap()); + self.serialization.push('#'); + self.mutate(|parser| parser.parse_fragment(parser::Input::new(input))) + } else { + self.fragment_start = None + } + } + + fn take_fragment(&mut self) -> Option { + self.fragment_start.take().map(|start| { + debug_assert!(self.byte_at(start) == b'#'); + let fragment = self.slice(start + 1..).to_owned(); + self.serialization.truncate(start as usize); + fragment + }) + } + + fn restore_already_parsed_fragment(&mut self, fragment: Option) { + if let Some(ref fragment) = fragment { + assert!(self.fragment_start.is_none()); + self.fragment_start = Some(to_u32(self.serialization.len()).unwrap()); + self.serialization.push('#'); + self.serialization.push_str(fragment); + } + } + + /// Change this URL’s query string. + /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("https://example.com/products")?; + /// assert_eq!(url.as_str(), "https://example.com/products"); + /// + /// url.set_query(Some("page=2")); + /// assert_eq!(url.as_str(), "https://example.com/products?page=2"); + /// assert_eq!(url.query(), Some("page=2")); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn set_query(&mut self, query: Option<&str>) { + let fragment = self.take_fragment(); + + // Remove any previous query + if let Some(start) = self.query_start.take() { + debug_assert!(self.byte_at(start) == b'?'); + self.serialization.truncate(start as usize); + } + // Write the new query, if any + if let Some(input) = query { + self.query_start = Some(to_u32(self.serialization.len()).unwrap()); + self.serialization.push('?'); + let scheme_end = self.scheme_end; + self.mutate(|parser| parser.parse_query(scheme_end, parser::Input::new(input))); + } + + self.restore_already_parsed_fragment(fragment); + } + + /// Manipulate this URL’s query string, viewed as a sequence of name/value pairs + /// in `application/x-www-form-urlencoded` syntax. 
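+    ///
+    /// New pairs are appended to any existing query;
+    /// use `clear()` on the returned serializer to remove them first.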
+ /// + /// The return value has a method-chaining API: + /// + /// ```rust + /// # use url::{Url, ParseError}; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("https://example.net?lang=fr#nav")?; + /// assert_eq!(url.query(), Some("lang=fr")); + /// + /// url.query_pairs_mut().append_pair("foo", "bar"); + /// assert_eq!(url.query(), Some("lang=fr&foo=bar")); + /// assert_eq!(url.as_str(), "https://example.net/?lang=fr&foo=bar#nav"); + /// + /// url.query_pairs_mut() + /// .clear() + /// .append_pair("foo", "bar & baz") + /// .append_pair("saisons", "\u{00C9}t\u{00E9}+hiver"); + /// assert_eq!(url.query(), Some("foo=bar+%26+baz&saisons=%C3%89t%C3%A9%2Bhiver")); + /// assert_eq!(url.as_str(), + /// "https://example.net/?foo=bar+%26+baz&saisons=%C3%89t%C3%A9%2Bhiver#nav"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// Note: `url.query_pairs_mut().clear();` is equivalent to `url.set_query(Some(""))`, + /// not `url.set_query(None)`. + /// + /// The state of `Url` is unspecified if this return value is leaked without being dropped. + pub fn query_pairs_mut(&mut self) -> form_urlencoded::Serializer { + let fragment = self.take_fragment(); + + let query_start; + if let Some(start) = self.query_start { + debug_assert!(self.byte_at(start) == b'?'); + query_start = start as usize; + } else { + query_start = self.serialization.len(); + self.query_start = Some(to_u32(query_start).unwrap()); + self.serialization.push('?'); + } + + let query = UrlQuery { url: self, fragment: fragment }; + form_urlencoded::Serializer::for_suffix(query, query_start + "?".len()) + } + + fn take_after_path(&mut self) -> String { + match (self.query_start, self.fragment_start) { + (Some(i), _) | (None, Some(i)) => { + let after_path = self.slice(i..).to_owned(); + self.serialization.truncate(i as usize); + after_path + }, + (None, None) => String::new(), + } + } + + /// Change this URL’s path. + /// + /// # Examples + /// + /// ```rust + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("https://example.com")?; + /// url.set_path("api/comments"); + /// assert_eq!(url.as_str(), "https://example.com/api/comments"); + /// assert_eq!(url.path(), "/api/comments"); + /// + /// let mut url = Url::parse("https://example.com/api")?; + /// url.set_path("data/report.csv"); + /// assert_eq!(url.as_str(), "https://example.com/data/report.csv"); + /// assert_eq!(url.path(), "/data/report.csv"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn set_path(&mut self, mut path: &str) { + let after_path = self.take_after_path(); + let old_after_path_pos = to_u32(self.serialization.len()).unwrap(); + let cannot_be_a_base = self.cannot_be_a_base(); + let scheme_type = SchemeType::from(self.scheme()); + self.serialization.truncate(self.path_start as usize); + self.mutate(|parser| { + if cannot_be_a_base { + if path.starts_with('/') { + parser.serialization.push_str("%2F"); + path = &path[1..]; + } + parser.parse_cannot_be_a_base_path(parser::Input::new(path)); + } else { + let mut has_host = true; // FIXME + parser.parse_path_start(scheme_type, &mut has_host, parser::Input::new(path)); + } + }); + self.restore_after_path(old_after_path_pos, &after_path); + } + + /// Return an object with methods to manipulate this URL’s path segments. + /// + /// Return `Err(())` if this URL is cannot-be-a-base. 
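+    ///
+    /// # Examples
+    ///
+    /// A short sketch of typical use, relying only on `PathSegmentsMut::push`
+    /// (re-exported at the top of this crate):
+    ///
+    /// ```
+    /// use url::Url;
+    /// # use std::error::Error;
+    ///
+    /// # fn run() -> Result<(), Box<Error>> {
+    /// let mut url = Url::parse("https://example.net/api")?;
+    /// url.path_segments_mut().map_err(|_| "cannot be base")?
+    ///     .push("versions");
+    /// assert_eq!(url.as_str(), "https://example.net/api/versions");
+    /// # Ok(())
+    /// # }
+    /// # run().unwrap();
+    /// ```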
+ pub fn path_segments_mut(&mut self) -> Result { + if self.cannot_be_a_base() { + Err(()) + } else { + Ok(path_segments::new(self)) + } + } + + fn restore_after_path(&mut self, old_after_path_position: u32, after_path: &str) { + let new_after_path_position = to_u32(self.serialization.len()).unwrap(); + let adjust = |index: &mut u32| { + *index -= old_after_path_position; + *index += new_after_path_position; + }; + if let Some(ref mut index) = self.query_start { adjust(index) } + if let Some(ref mut index) = self.fragment_start { adjust(index) } + self.serialization.push_str(after_path) + } + + /// Change this URL’s port number. + /// + /// If this URL is cannot-be-a-base, does not have a host, or has the `file` scheme; + /// do nothing and return `Err`. + /// + /// # Examples + /// + /// ``` + /// use url::Url; + /// # use std::error::Error; + /// + /// # fn run() -> Result<(), Box> { + /// let mut url = Url::parse("ssh://example.net:2048/")?; + /// + /// url.set_port(Some(4096)).map_err(|_| "cannot be base")?; + /// assert_eq!(url.as_str(), "ssh://example.net:4096/"); + /// + /// url.set_port(None).map_err(|_| "cannot be base")?; + /// assert_eq!(url.as_str(), "ssh://example.net/"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// Cannot set port for cannot-be-a-base URLs: + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("mailto:rms@example.net")?; + /// + /// let result = url.set_port(Some(80)); + /// assert!(result.is_err()); + /// + /// let result = url.set_port(None); + /// assert!(result.is_err()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn set_port(&mut self, mut port: Option) -> Result<(), ()> { + if !self.has_host() || self.scheme() == "file" { + return Err(()) + } + if port.is_some() && port == parser::default_port(self.scheme()) { + port = None + } + self.set_port_internal(port); + Ok(()) + } + + fn set_port_internal(&mut self, port: Option) { + match (self.port, port) { + (None, None) => {} + (Some(_), None) => { + self.serialization.drain(self.host_end as usize .. self.path_start as usize); + let offset = self.path_start - self.host_end; + self.path_start = self.host_end; + if let Some(ref mut index) = self.query_start { *index -= offset } + if let Some(ref mut index) = self.fragment_start { *index -= offset } + } + (Some(old), Some(new)) if old == new => {} + (_, Some(new)) => { + let path_and_after = self.slice(self.path_start..).to_owned(); + self.serialization.truncate(self.host_end as usize); + write!(&mut self.serialization, ":{}", new).unwrap(); + let old_path_start = self.path_start; + let new_path_start = to_u32(self.serialization.len()).unwrap(); + self.path_start = new_path_start; + let adjust = |index: &mut u32| { + *index -= old_path_start; + *index += new_path_start; + }; + if let Some(ref mut index) = self.query_start { adjust(index) } + if let Some(ref mut index) = self.fragment_start { adjust(index) } + self.serialization.push_str(&path_and_after); + } + } + self.port = port; + } + + /// Change this URL’s host. + /// + /// If this URL is cannot-be-a-base or there is an error parsing the given `host`, + /// do nothing and return `Err`. + /// + /// Removing the host (calling this with `None`) + /// will also remove any username, password, and port number. 
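+    ///
+    /// For URLs in "special" schemes such as `https`, removing the host
+    /// (or setting an empty one) fails with `ParseError::EmptyHost`,
+    /// as the examples below show.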
+ /// + /// # Examples + /// + /// Change host: + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("https://example.net")?; + /// let result = url.set_host(Some("rust-lang.org")); + /// assert!(result.is_ok()); + /// assert_eq!(url.as_str(), "https://rust-lang.org/"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// Remove host: + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("foo://example.net")?; + /// let result = url.set_host(None); + /// assert!(result.is_ok()); + /// assert_eq!(url.as_str(), "foo:/"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// Cannot remove host for 'special' schemes (e.g. `http`): + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("https://example.net")?; + /// let result = url.set_host(None); + /// assert!(result.is_err()); + /// assert_eq!(url.as_str(), "https://example.net/"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// Cannot change or remove host for cannot-be-a-base URLs: + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("mailto:rms@example.net")?; + /// + /// let result = url.set_host(Some("rust-lang.org")); + /// assert!(result.is_err()); + /// assert_eq!(url.as_str(), "mailto:rms@example.net"); + /// + /// let result = url.set_host(None); + /// assert!(result.is_err()); + /// assert_eq!(url.as_str(), "mailto:rms@example.net"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn set_host(&mut self, host: Option<&str>) -> Result<(), ParseError> { + if self.cannot_be_a_base() { + return Err(ParseError::SetHostOnCannotBeABaseUrl) + } + + if let Some(host) = host { + if host == "" && SchemeType::from(self.scheme()).is_special() { + return Err(ParseError::EmptyHost); + } + self.set_host_internal(Host::parse(host)?, None) + } else if self.has_host() { + if SchemeType::from(self.scheme()).is_special() { + return Err(ParseError::EmptyHost) + } + debug_assert!(self.byte_at(self.scheme_end) == b':'); + debug_assert!(self.byte_at(self.path_start) == b'/'); + let new_path_start = self.scheme_end + 1; + self.serialization.drain(new_path_start as usize..self.path_start as usize); + let offset = self.path_start - new_path_start; + self.path_start = new_path_start; + self.username_end = new_path_start; + self.host_start = new_path_start; + self.host_end = new_path_start; + self.port = None; + if let Some(ref mut index) = self.query_start { *index -= offset } + if let Some(ref mut index) = self.fragment_start { *index -= offset } + } + Ok(()) + } + + /// opt_new_port: None means leave unchanged, Some(None) means remove any port number. 
+ fn set_host_internal(&mut self, host: Host, opt_new_port: Option>) { + let old_suffix_pos = if opt_new_port.is_some() { self.path_start } else { self.host_end }; + let suffix = self.slice(old_suffix_pos..).to_owned(); + self.serialization.truncate(self.host_start as usize); + if !self.has_authority() { + debug_assert!(self.slice(self.scheme_end..self.host_start) == ":"); + debug_assert!(self.username_end == self.host_start); + self.serialization.push('/'); + self.serialization.push('/'); + self.username_end += 2; + self.host_start += 2; + } + write!(&mut self.serialization, "{}", host).unwrap(); + self.host_end = to_u32(self.serialization.len()).unwrap(); + self.host = host.into(); + + if let Some(new_port) = opt_new_port { + self.port = new_port; + if let Some(port) = new_port { + write!(&mut self.serialization, ":{}", port).unwrap(); + } + } + let new_suffix_pos = to_u32(self.serialization.len()).unwrap(); + self.serialization.push_str(&suffix); + + let adjust = |index: &mut u32| { + *index -= old_suffix_pos; + *index += new_suffix_pos; + }; + adjust(&mut self.path_start); + if let Some(ref mut index) = self.query_start { adjust(index) } + if let Some(ref mut index) = self.fragment_start { adjust(index) } + } + + /// Change this URL’s host to the given IP address. + /// + /// If this URL is cannot-be-a-base, do nothing and return `Err`. + /// + /// Compared to `Url::set_host`, this skips the host parser. + /// + /// # Examples + /// + /// ```rust + /// use url::{Url, ParseError}; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("http://example.com")?; + /// url.set_ip_host("127.0.0.1".parse().unwrap()); + /// assert_eq!(url.host_str(), Some("127.0.0.1")); + /// assert_eq!(url.as_str(), "http://127.0.0.1/"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// Cannot change URL's from mailto(cannot-be-base) to ip: + /// + /// ```rust + /// use url::{Url, ParseError}; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("mailto:rms@example.com")?; + /// let result = url.set_ip_host("127.0.0.1".parse().unwrap()); + /// + /// assert_eq!(url.as_str(), "mailto:rms@example.com"); + /// assert!(result.is_err()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + pub fn set_ip_host(&mut self, address: IpAddr) -> Result<(), ()> { + if self.cannot_be_a_base() { + return Err(()) + } + + let address = match address { + IpAddr::V4(address) => Host::Ipv4(address), + IpAddr::V6(address) => Host::Ipv6(address), + }; + self.set_host_internal(address, None); + Ok(()) + } + + /// Change this URL’s password. + /// + /// If this URL is cannot-be-a-base or does not have a host, do nothing and return `Err`. 
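+    ///
+    /// The new password is percent-encoded with the USERINFO encode set
+    /// before being written into the serialization.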
+ /// + /// # Examples + /// + /// ```rust + /// use url::{Url, ParseError}; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("mailto:rmz@example.com")?; + /// let result = url.set_password(Some("secret_password")); + /// assert!(result.is_err()); + /// + /// let mut url = Url::parse("ftp://user1:secret1@example.com")?; + /// let result = url.set_password(Some("secret_password")); + /// assert_eq!(url.password(), Some("secret_password")); + /// + /// let mut url = Url::parse("ftp://user2:@example.com")?; + /// let result = url.set_password(Some("secret2")); + /// assert!(result.is_ok()); + /// assert_eq!(url.password(), Some("secret2")); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn set_password(&mut self, password: Option<&str>) -> Result<(), ()> { + if !self.has_host() { + return Err(()) + } + if let Some(password) = password { + let host_and_after = self.slice(self.host_start..).to_owned(); + self.serialization.truncate(self.username_end as usize); + self.serialization.push(':'); + self.serialization.extend(utf8_percent_encode(password, USERINFO_ENCODE_SET)); + self.serialization.push('@'); + + let old_host_start = self.host_start; + let new_host_start = to_u32(self.serialization.len()).unwrap(); + let adjust = |index: &mut u32| { + *index -= old_host_start; + *index += new_host_start; + }; + self.host_start = new_host_start; + adjust(&mut self.host_end); + adjust(&mut self.path_start); + if let Some(ref mut index) = self.query_start { adjust(index) } + if let Some(ref mut index) = self.fragment_start { adjust(index) } + + self.serialization.push_str(&host_and_after); + } else if self.byte_at(self.username_end) == b':' { // If there is a password to remove + let has_username_or_password = self.byte_at(self.host_start - 1) == b'@'; + debug_assert!(has_username_or_password); + let username_start = self.scheme_end + 3; + let empty_username = username_start == self.username_end; + let start = self.username_end; // Remove the ':' + let end = if empty_username { + self.host_start // Remove the '@' as well + } else { + self.host_start - 1 // Keep the '@' to separate the username from the host + }; + self.serialization.drain(start as usize .. end as usize); + let offset = end - start; + self.host_start -= offset; + self.host_end -= offset; + self.path_start -= offset; + if let Some(ref mut index) = self.query_start { *index -= offset } + if let Some(ref mut index) = self.fragment_start { *index -= offset } + } + Ok(()) + } + + /// Change this URL’s username. + /// + /// If this URL is cannot-be-a-base or does not have a host, do nothing and return `Err`. 
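+    ///
+    /// Like `set_password`, the new username is percent-encoded with the
+    /// USERINFO encode set before being written.
+    ///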
+ /// # Examples + /// + /// Cannot setup username from mailto(cannot-be-base) + /// + /// ```rust + /// use url::{Url, ParseError}; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("mailto:rmz@example.com")?; + /// let result = url.set_username("user1"); + /// assert_eq!(url.as_str(), "mailto:rmz@example.com"); + /// assert!(result.is_err()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// Setup username to user1 + /// ```rust + /// use url::{Url, ParseError}; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("ftp://:secre1@example.com")?; + /// let result = url.set_username("user1"); + /// assert!(result.is_ok()); + /// assert_eq!(url.username(), "user1"); + /// assert_eq!(url.as_str(), "ftp://user1:secre1@example.com"); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn set_username(&mut self, username: &str) -> Result<(), ()> { + if !self.has_host() { + return Err(()) + } + let username_start = self.scheme_end + 3; + debug_assert!(self.slice(self.scheme_end..username_start) == "://"); + if self.slice(username_start..self.username_end) == username { + return Ok(()) + } + let after_username = self.slice(self.username_end..).to_owned(); + self.serialization.truncate(username_start as usize); + self.serialization.extend(utf8_percent_encode(username, USERINFO_ENCODE_SET)); + + let mut removed_bytes = self.username_end; + self.username_end = to_u32(self.serialization.len()).unwrap(); + let mut added_bytes = self.username_end; + + let new_username_is_empty = self.username_end == username_start; + match (new_username_is_empty, after_username.chars().next()) { + (true, Some('@')) => { + removed_bytes += 1; + self.serialization.push_str(&after_username[1..]); + } + (false, Some('@')) | (_, Some(':')) | (true, _) => { + self.serialization.push_str(&after_username); + } + (false, _) => { + added_bytes += 1; + self.serialization.push('@'); + self.serialization.push_str(&after_username); + } + } + + let adjust = |index: &mut u32| { + *index -= removed_bytes; + *index += added_bytes; + }; + adjust(&mut self.host_start); + adjust(&mut self.host_end); + adjust(&mut self.path_start); + if let Some(ref mut index) = self.query_start { adjust(index) } + if let Some(ref mut index) = self.fragment_start { adjust(index) } + Ok(()) + } + + /// Change this URL’s scheme. 
+ /// + /// Do nothing and return `Err` if: + /// + /// * The new scheme is not in `[a-zA-Z][a-zA-Z0-9+.-]+` + /// * This URL is cannot-be-a-base and the new scheme is one of + /// `http`, `https`, `ws`, `wss`, `ftp`, or `gopher` + /// + /// # Examples + /// + /// Change the URL’s scheme from `https` to `foo`: + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("https://example.net")?; + /// let result = url.set_scheme("foo"); + /// assert_eq!(url.as_str(), "foo://example.net/"); + /// assert!(result.is_ok()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// + /// Cannot change URL’s scheme from `https` to `foõ`: + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("https://example.net")?; + /// let result = url.set_scheme("foõ"); + /// assert_eq!(url.as_str(), "https://example.net/"); + /// assert!(result.is_err()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + /// + /// Cannot change URL’s scheme from `mailto` (cannot-be-a-base) to `https`: + /// + /// ``` + /// use url::Url; + /// # use url::ParseError; + /// + /// # fn run() -> Result<(), ParseError> { + /// let mut url = Url::parse("mailto:rms@example.net")?; + /// let result = url.set_scheme("https"); + /// assert_eq!(url.as_str(), "mailto:rms@example.net"); + /// assert!(result.is_err()); + /// # Ok(()) + /// # } + /// # run().unwrap(); + /// ``` + pub fn set_scheme(&mut self, scheme: &str) -> Result<(), ()> { + let mut parser = Parser::for_setter(String::new()); + let remaining = parser.parse_scheme(parser::Input::new(scheme))?; + if !remaining.is_empty() || + (!self.has_host() && SchemeType::from(&parser.serialization).is_special()) { + return Err(()) + } + let old_scheme_end = self.scheme_end; + let new_scheme_end = to_u32(parser.serialization.len()).unwrap(); + let adjust = |index: &mut u32| { + *index -= old_scheme_end; + *index += new_scheme_end; + }; + + self.scheme_end = new_scheme_end; + adjust(&mut self.username_end); + adjust(&mut self.host_start); + adjust(&mut self.host_end); + adjust(&mut self.path_start); + if let Some(ref mut index) = self.query_start { adjust(index) } + if let Some(ref mut index) = self.fragment_start { adjust(index) } + + parser.serialization.push_str(self.slice(old_scheme_end..)); + self.serialization = parser.serialization; + Ok(()) + } + + /// Convert a file name as `std::path::Path` into an URL in the `file` scheme. + /// + /// This returns `Err` if the given path is not absolute or, + /// on Windows, if the prefix is not a disk prefix (e.g. `C:`) or a UNC prefix (`\\`). 
+ ///
+ /// # Examples
+ ///
+ /// On Unix-like platforms:
+ ///
+ /// ```
+ /// # if cfg!(unix) {
+ /// use url::Url;
+ ///
+ /// # fn run() -> Result<(), ()> {
+ /// let url = Url::from_file_path("/tmp/foo.txt")?;
+ /// assert_eq!(url.as_str(), "file:///tmp/foo.txt");
+ ///
+ /// let url = Url::from_file_path("../foo.txt");
+ /// assert!(url.is_err());
+ ///
+ /// let url = Url::from_file_path("https://google.com/");
+ /// assert!(url.is_err());
+ /// # Ok(())
+ /// # }
+ /// # run().unwrap();
+ /// # }
+ /// ```
+ pub fn from_file_path<P: AsRef<Path>>(path: P) -> Result<Url, ()> {
+ let mut serialization = "file://".to_owned();
+ let host_start = serialization.len() as u32;
+ let (host_end, host) = path_to_file_url_segments(path.as_ref(), &mut serialization)?;
+ Ok(Url {
+ serialization: serialization,
+ scheme_end: "file".len() as u32,
+ username_end: host_start,
+ host_start: host_start,
+ host_end: host_end,
+ host: host,
+ port: None,
+ path_start: host_end,
+ query_start: None,
+ fragment_start: None,
+ })
+ }
+
+ /// Convert a directory name as `std::path::Path` into an URL in the `file` scheme.
+ ///
+ /// This returns `Err` if the given path is not absolute or,
+ /// on Windows, if the prefix is not a disk prefix (e.g. `C:`) or a UNC prefix (`\\`).
+ ///
+ /// Compared to `from_file_path`, this ensures that the URL’s path has a trailing slash
+ /// so that the entire path is considered when using this URL as a base URL.
+ ///
+ /// For example:
+ ///
+ /// * `"index.html"` parsed with `Url::from_directory_path(Path::new("/var/www"))`
+ /// as the base URL is `file:///var/www/index.html`
+ /// * `"index.html"` parsed with `Url::from_file_path(Path::new("/var/www"))`
+ /// as the base URL is `file:///var/index.html`, which might not be what was intended.
+ ///
+ /// Note that `std::path` does not consider trailing slashes significant
+ /// and usually does not include them (e.g. in `Path::parent()`).
+ pub fn from_directory_path<P: AsRef<Path>>(path: P) -> Result<Url, ()> {
+ let mut url = Url::from_file_path(path)?;
+ if !url.serialization.ends_with('/') {
+ url.serialization.push('/')
+ }
+ Ok(url)
+ }
+
+ /// Serialize with Serde using the internal representation of the `Url` struct.
+ ///
+ /// The corresponding `deserialize_internal` method sacrifices some invariant-checking
+ /// for speed, compared to the `Deserialize` trait impl.
+ ///
+ /// This method is only available if the `serde` Cargo feature is enabled.
+ #[cfg(feature = "serde")]
+ #[deny(unused)]
+ pub fn serialize_internal<S>(&self, serializer: &mut S) -> Result<(), S::Error> where S: serde::Serializer {
+ use serde::Serialize;
+ // Destructuring first lets us ensure that adding or removing fields forces this method
+ // to be updated
+ let Url { ref serialization, ref scheme_end,
+ ref username_end, ref host_start,
+ ref host_end, ref host, ref port,
+ ref path_start, ref query_start,
+ ref fragment_start} = *self;
+ (serialization, scheme_end, username_end,
+ host_start, host_end, host, port, path_start,
+ query_start, fragment_start).serialize(serializer)
+ }
+
+ /// Deserialize with Serde using the internal representation of the `Url` struct.
+ ///
+ /// This method sacrifices some invariant-checking for speed,
+ /// compared to the `Deserialize` trait impl.
+ ///
+ /// This method is only available if the `serde` Cargo feature is enabled.
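+ // A rough usage sketch of the internal (de)serialization pair; `ser` and `de` are
+ // hypothetical serde 0.x serializer/deserializer values, not part of this crate:
+ //
+ //     url.serialize_internal(&mut ser)?;             // writes the raw internal fields
+ //     let url = Url::deserialize_internal(&mut de)?; // trusts them on the way back in
+ //
+ // Skipping the re-parse is what makes this faster than the `Serialize`/`Deserialize`
+ // trait impls, at the cost of trusting the serialized invariants.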
+ #[cfg(feature = "serde")]
+ #[deny(unused)]
+ pub fn deserialize_internal<D>(deserializer: &mut D) -> Result<Url, D::Error> where D: serde::Deserializer {
+ use serde::{Deserialize, Error};
+ let (serialization, scheme_end, username_end,
+ host_start, host_end, host, port, path_start,
+ query_start, fragment_start) = Deserialize::deserialize(deserializer)?;
+ let url = Url {
+ serialization: serialization,
+ scheme_end: scheme_end,
+ username_end: username_end,
+ host_start: host_start,
+ host_end: host_end,
+ host: host,
+ port: port,
+ path_start: path_start,
+ query_start: query_start,
+ fragment_start: fragment_start
+ };
+ if cfg!(debug_assertions) {
+ url.check_invariants().map_err(|ref reason| Error::invalid_value(&reason))?
+ }
+ Ok(url)
+ }
+
+
+ /// Assuming the URL is in the `file` scheme or similar,
+ /// convert its path to an absolute `std::path::Path`.
+ ///
+ /// **Note:** This does not actually check the URL’s `scheme`,
+ /// and may give nonsensical results for other schemes.
+ /// It is the user’s responsibility to check the URL’s scheme before calling this.
+ ///
+ /// ```
+ /// # use url::Url;
+ /// # let url = Url::parse("file:///etc/passwd").unwrap();
+ /// let path = url.to_file_path();
+ /// ```
+ ///
+ /// Returns `Err` if the host is neither empty nor `"localhost"` (except on Windows, where
+ /// `file:` URLs may have a non-local host),
+ /// or if `Path::new_opt()` returns `None`.
+ /// (That is, if the percent-decoded path contains a NUL byte or,
+ /// for a Windows path, is not UTF-8.)
+ #[inline]
+ pub fn to_file_path(&self) -> Result<PathBuf, ()> {
+ if let Some(segments) = self.path_segments() {
+ let host = match self.host() {
+ None | Some(Host::Domain("localhost")) => None,
+ Some(_) if cfg!(windows) && self.scheme() == "file" => {
+ Some(&self.serialization[self.host_start as usize .. self.host_end as usize])
+ },
+ _ => return Err(())
+ };
+
+ return file_url_segments_to_pathbuf(host, segments);
+ }
+ Err(())
+ }
+
+ // Private helper methods:
+
+ #[inline]
+ fn slice<R>(&self, range: R) -> &str where R: RangeArg {
+ range.slice_of(&self.serialization)
+ }
+
+ #[inline]
+ fn byte_at(&self, i: u32) -> u8 {
+ self.serialization.as_bytes()[i as usize]
+ }
+}
+
+/// Return an error if `Url::host` or `Url::port_or_known_default` return `None`.
+impl ToSocketAddrs for Url {
+ type Iter = SocketAddrs;
+
+ fn to_socket_addrs(&self) -> io::Result<SocketAddrs> {
+ self.with_default_port(|_| Err(()))?.to_socket_addrs()
+ }
+}
+
+/// Parse a string as an URL, without a base URL or encoding override.
+impl str::FromStr for Url {
+ type Err = ParseError;
+
+ #[inline]
+ fn from_str(input: &str) -> Result<Url, ParseError> {
+ Url::parse(input)
+ }
+}
+
+/// Display the serialization of this URL.
+impl fmt::Display for Url {
+ #[inline]
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(&self.serialization, formatter)
+ }
+}
+
+/// Debug the serialization of this URL.
+impl fmt::Debug for Url {
+ #[inline]
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Debug::fmt(&self.serialization, formatter)
+ }
+}
+
+/// URLs compare like their serialization.
+impl Eq for Url {}
+
+/// URLs compare like their serialization.
+impl PartialEq for Url {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ self.serialization == other.serialization
+ }
+}
+
+/// URLs compare like their serialization.
+impl Ord for Url {
+ #[inline]
+ fn cmp(&self, other: &Self) -> cmp::Ordering {
+ self.serialization.cmp(&other.serialization)
+ }
+}
+
+/// URLs compare like their serialization.
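+///
+/// For example (an illustrative sketch):
+///
+/// ```
+/// use url::Url;
+/// let a = Url::parse("http://example.com").unwrap();
+/// let b = Url::parse("http://example.com/").unwrap();
+/// // both serialize as "http://example.com/", so they compare equal
+/// assert_eq!(a, b);
+/// ```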
+impl PartialOrd for Url {
+ #[inline]
+ fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
+ self.serialization.partial_cmp(&other.serialization)
+ }
+}
+
+/// URLs hash like their serialization.
+impl hash::Hash for Url {
+ #[inline]
+ fn hash<H>(&self, state: &mut H) where H: hash::Hasher {
+ hash::Hash::hash(&self.serialization, state)
+ }
+}
+
+/// Return the serialization of this URL.
+impl AsRef<str> for Url {
+ #[inline]
+ fn as_ref(&self) -> &str {
+ &self.serialization
+ }
+}
+
+trait RangeArg {
+ fn slice_of<'a>(&self, s: &'a str) -> &'a str;
+}
+
+impl RangeArg for Range<u32> {
+ #[inline]
+ fn slice_of<'a>(&self, s: &'a str) -> &'a str {
+ &s[self.start as usize .. self.end as usize]
+ }
+}
+
+impl RangeArg for RangeFrom<u32> {
+ #[inline]
+ fn slice_of<'a>(&self, s: &'a str) -> &'a str {
+ &s[self.start as usize ..]
+ }
+}
+
+impl RangeArg for RangeTo<u32> {
+ #[inline]
+ fn slice_of<'a>(&self, s: &'a str) -> &'a str {
+ &s[.. self.end as usize]
+ }
+}
+
+#[cfg(feature="rustc-serialize")]
+impl rustc_serialize::Encodable for Url {
+ fn encode<S: rustc_serialize::Encoder>(&self, encoder: &mut S) -> Result<(), S::Error> {
+ encoder.emit_str(self.as_str())
+ }
+}
+
+
+#[cfg(feature="rustc-serialize")]
+impl rustc_serialize::Decodable for Url {
+ fn decode<D: rustc_serialize::Decoder>(decoder: &mut D) -> Result<Url, D::Error> {
+ Url::parse(&*decoder.read_str()?).map_err(|error| {
+ decoder.error(&format!("URL parsing error: {}", error))
+ })
+ }
+}
+
+/// Serializes this URL into a `serde` stream.
+///
+/// This implementation is only available if the `serde` Cargo feature is enabled.
+#[cfg(feature="serde")]
+impl serde::Serialize for Url {
+ fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error> where S: serde::Serializer {
+ serializer.serialize_str(self.as_str())
+ }
+}
+
+/// Deserializes this URL from a `serde` stream.
+///
+/// This implementation is only available if the `serde` Cargo feature is enabled.
+#[cfg(feature="serde")]
+impl serde::Deserialize for Url {
+ fn deserialize<D>(deserializer: &mut D) -> Result<Url, D::Error> where D: serde::Deserializer {
+ let string_representation: String = serde::Deserialize::deserialize(deserializer)?;
+ Url::parse(&string_representation).map_err(|err| {
+ serde::Error::invalid_value(err.description())
+ })
+ }
+}
+
+#[cfg(any(unix, target_os = "redox"))]
+fn path_to_file_url_segments(path: &Path, serialization: &mut String)
+ -> Result<(u32, HostInternal), ()> {
+ use std::os::unix::prelude::OsStrExt;
+ if !path.is_absolute() {
+ return Err(())
+ }
+ let host_end = to_u32(serialization.len()).unwrap();
+ let mut empty = true;
+ // skip the root component
+ for component in path.components().skip(1) {
+ empty = false;
+ serialization.push('/');
+ serialization.extend(percent_encode(
+ component.as_os_str().as_bytes(), PATH_SEGMENT_ENCODE_SET));
+ }
+ if empty {
+ // An URL’s path must not be empty.
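+ // (sketch) e.g. `Path::new("/")` has only the root component, so the loop above
+ // was skipped entirely and the result serializes as "file:///".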
+ serialization.push('/');
+ }
+ Ok((host_end, HostInternal::None))
+}
+
+#[cfg(windows)]
+fn path_to_file_url_segments(path: &Path, serialization: &mut String)
+ -> Result<(u32, HostInternal), ()> {
+ path_to_file_url_segments_windows(path, serialization)
+}
+
+// Build this unconditionally to alleviate https://github.com/servo/rust-url/issues/102
+#[cfg_attr(not(windows), allow(dead_code))]
+fn path_to_file_url_segments_windows(path: &Path, serialization: &mut String)
+ -> Result<(u32, HostInternal), ()> {
+ use std::path::{Prefix, Component};
+ if !path.is_absolute() {
+ return Err(())
+ }
+ let mut components = path.components();
+
+ let host_end;
+ let host_internal;
+ match components.next() {
+ Some(Component::Prefix(ref p)) => match p.kind() {
+ Prefix::Disk(letter) | Prefix::VerbatimDisk(letter) => {
+ host_end = to_u32(serialization.len()).unwrap();
+ host_internal = HostInternal::None;
+ serialization.push('/');
+ serialization.push(letter as char);
+ serialization.push(':');
+ },
+ Prefix::UNC(server, share) | Prefix::VerbatimUNC(server, share) => {
+ let host = Host::parse(server.to_str().ok_or(())?).map_err(|_| ())?;
+ write!(serialization, "{}", host).unwrap();
+ host_end = to_u32(serialization.len()).unwrap();
+ host_internal = host.into();
+ serialization.push('/');
+ let share = share.to_str().ok_or(())?;
+ serialization.extend(percent_encode(share.as_bytes(), PATH_SEGMENT_ENCODE_SET));
+ },
+ _ => return Err(())
+ },
+
+ _ => return Err(())
+ }
+
+ for component in components {
+ if component == Component::RootDir { continue }
+ // FIXME: somehow work with non-unicode?
+ let component = component.as_os_str().to_str().ok_or(())?;
+ serialization.push('/');
+ serialization.extend(percent_encode(component.as_bytes(), PATH_SEGMENT_ENCODE_SET));
+ }
+ Ok((host_end, host_internal))
+}
+
+#[cfg(any(unix, target_os = "redox"))]
+fn file_url_segments_to_pathbuf(host: Option<&str>, segments: str::Split<char>) -> Result<PathBuf, ()> {
+ use std::ffi::OsStr;
+ use std::os::unix::prelude::OsStrExt;
+ use std::path::PathBuf;
+
+ if host.is_some() {
+ return Err(());
+ }
+
+ let mut bytes = Vec::new();
+ for segment in segments {
+ bytes.push(b'/');
+ bytes.extend(percent_decode(segment.as_bytes()));
+ }
+ let os_str = OsStr::from_bytes(&bytes);
+ let path = PathBuf::from(os_str);
+ debug_assert!(path.is_absolute(),
+ "to_file_path() failed to produce an absolute Path");
+ Ok(path)
+}
+
+#[cfg(windows)]
+fn file_url_segments_to_pathbuf(host: Option<&str>, segments: str::Split<char>) -> Result<PathBuf, ()> {
+ file_url_segments_to_pathbuf_windows(host, segments)
+}
+
+// Build this unconditionally to alleviate https://github.com/servo/rust-url/issues/102
+#[cfg_attr(not(windows), allow(dead_code))]
+fn file_url_segments_to_pathbuf_windows(host: Option<&str>, mut segments: str::Split<char>) -> Result<PathBuf, ()> {
+
+ let mut string = if let Some(host) = host {
+ r"\\".to_owned() + host
+ } else {
+ let first = segments.next().ok_or(())?;
+
+ match first.len() {
+ 2 => {
+ if !first.starts_with(parser::ascii_alpha) || first.as_bytes()[1] != b':' {
+ return Err(())
+ }
+
+ first.to_owned()
+ },
+
+ 4 => {
+ if !first.starts_with(parser::ascii_alpha) {
+ return Err(())
+ }
+ let bytes = first.as_bytes();
+ if bytes[1] != b'%' || bytes[2] != b'3' || (bytes[3] != b'a' && bytes[3] != b'A') {
+ return Err(())
+ }
+
+ first[0..1].to_owned() + ":"
+ },
+
+ _ => return Err(()),
+ }
+ };
+
+ for segment in segments {
+ string.push('\\');
+
+ // Currently non-unicode windows paths cannot be represented
+ match
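+ // (sketch) e.g. the segment "a%20b" percent-decodes to "a b"; a non-UTF-8 escape
+ // such as "%FF" makes the conversion below return Err(()).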
String::from_utf8(percent_decode(segment.as_bytes()).collect()) {
+ Ok(s) => string.push_str(&s),
+ Err(..) => return Err(()),
+ }
+ }
+ let path = PathBuf::from(string);
+ debug_assert!(path.is_absolute(),
+ "to_file_path() failed to produce an absolute Path");
+ Ok(path)
+}
+
+fn io_error<T>(reason: &str) -> io::Result<T> {
+ Err(io::Error::new(io::ErrorKind::InvalidData, reason))
+}
+
+/// Implementation detail of `Url::query_pairs_mut`. Typically not used directly.
+#[derive(Debug)]
+pub struct UrlQuery<'a> {
+ url: &'a mut Url,
+ fragment: Option<String>,
+}
+
+impl<'a> Drop for UrlQuery<'a> {
+ fn drop(&mut self) {
+ self.url.restore_already_parsed_fragment(self.fragment.take())
+ }
+}
+
+
+/// Define a new struct
+/// that implements the [`EncodeSet`](percent_encoding/trait.EncodeSet.html) trait,
+/// for use in [`percent_encode()`](percent_encoding/fn.percent_encode.html)
+/// and related functions.
+///
+/// Parameters are characters to include in the set in addition to those of the base set.
+/// See [encode sets specification](http://url.spec.whatwg.org/#simple-encode-set).
+///
+/// Example
+/// =======
+///
+/// ```rust
+/// #[macro_use] extern crate url;
+/// use url::percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET};
+/// define_encode_set! {
+/// /// This encode set is used in the URL parser for query strings.
+/// pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'}
+/// }
+/// # fn main() {
+/// assert_eq!(utf8_percent_encode("foo bar", QUERY_ENCODE_SET).collect::<String>(), "foo%20bar");
+/// # }
+/// ```
+#[macro_export]
+macro_rules! define_encode_set {
+ ($(#[$attr: meta])* pub $name: ident = [$base_set: expr] | {$($ch: pat),*}) => {
+ $(#[$attr])*
+ #[derive(Copy, Clone)]
+ #[allow(non_camel_case_types)]
+ pub struct $name;
+
+ impl $crate::percent_encoding::EncodeSet for $name {
+ #[inline]
+ fn contains(&self, byte: u8) -> bool {
+ match byte as char {
+ $(
+ $ch => true,
+ )*
+ _ => $base_set.contains(byte)
+ }
+ }
+ }
+ }
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/origin.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/origin.rs
new file mode 100644
index 000000000..e19a28577
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/origin.rs
@@ -0,0 +1,130 @@
+// Copyright 2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[cfg(feature = "heapsize")] use heapsize::HeapSizeOf;
+use host::Host;
+use idna::domain_to_unicode;
+use parser::default_port;
+use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
+use Url;
+
+pub fn url_origin(url: &Url) -> Origin {
+ let scheme = url.scheme();
+ match scheme {
+ "blob" => {
+ let result = Url::parse(url.path());
+ match result {
+ Ok(ref url) => url_origin(url),
+ Err(_) => Origin::new_opaque()
+ }
+ },
+ "ftp" | "gopher" | "http" | "https" | "ws" | "wss" => {
+ Origin::Tuple(scheme.to_owned(), url.host().unwrap().to_owned(),
+ url.port_or_known_default().unwrap())
+ },
+ // TODO: Figure out what to do if the scheme is a file
+ "file" => Origin::new_opaque(),
+ _ => Origin::new_opaque()
+ }
+}
+
+/// The origin of an URL
+///
+/// Two URLs with the same origin are considered
+/// to originate from the same entity and can therefore trust
+/// each other.
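+///
+/// For example (an illustrative sketch):
+///
+/// ```rust
+/// use url::Url;
+///
+/// let a = Url::parse("https://example.com/a").unwrap();
+/// let b = Url::parse("https://example.com/b").unwrap();
+/// // same scheme, host and port, hence the same origin
+/// assert_eq!(a.origin(), b.origin());
+/// ```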
+///
+/// The origin is determined based on the scheme as follows:
+///
+/// - If the scheme is "blob" the origin is the origin of the
+/// URL contained in the path component. If parsing fails,
+/// it is an opaque origin.
+/// - If the scheme is "ftp", "gopher", "http", "https", "ws", or "wss",
+/// then the origin is a tuple of the scheme, host, and port.
+/// - If the scheme is anything else, the origin is opaque, meaning
+/// the URL does not have the same origin as any other URL.
+///
+/// For more information see https://url.spec.whatwg.org/#origin
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+pub enum Origin {
+ /// A globally unique identifier
+ Opaque(OpaqueOrigin),
+
+ /// Consists of the URL's scheme, host and port
+ Tuple(String, Host<String>, u16)
+}
+
+#[cfg(feature = "heapsize")]
+impl HeapSizeOf for Origin {
+ fn heap_size_of_children(&self) -> usize {
+ match *self {
+ Origin::Tuple(ref scheme, ref host, _) => {
+ scheme.heap_size_of_children() +
+ host.heap_size_of_children()
+ },
+ _ => 0,
+ }
+ }
+}
+
+
+impl Origin {
+ /// Creates a new opaque origin that is only equal to itself.
+ pub fn new_opaque() -> Origin {
+ static COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
+ Origin::Opaque(OpaqueOrigin(COUNTER.fetch_add(1, Ordering::SeqCst)))
+ }
+
+ /// Return whether this origin is a (scheme, host, port) tuple
+ /// (as opposed to an opaque origin).
+ pub fn is_tuple(&self) -> bool {
+ matches!(*self, Origin::Tuple(..))
+ }
+
+ /// https://html.spec.whatwg.org/multipage/#ascii-serialisation-of-an-origin
+ pub fn ascii_serialization(&self) -> String {
+ match *self {
+ Origin::Opaque(_) => "null".to_owned(),
+ Origin::Tuple(ref scheme, ref host, port) => {
+ if default_port(scheme) == Some(port) {
+ format!("{}://{}", scheme, host)
+ } else {
+ format!("{}://{}:{}", scheme, host, port)
+ }
+ }
+ }
+ }
+
+ /// https://html.spec.whatwg.org/multipage/#unicode-serialisation-of-an-origin
+ pub fn unicode_serialization(&self) -> String {
+ match *self {
+ Origin::Opaque(_) => "null".to_owned(),
+ Origin::Tuple(ref scheme, ref host, port) => {
+ let host = match *host {
+ Host::Domain(ref domain) => {
+ let (domain, _errors) = domain_to_unicode(domain);
+ Host::Domain(domain)
+ }
+ _ => host.clone()
+ };
+ if default_port(scheme) == Some(port) {
+ format!("{}://{}", scheme, host)
+ } else {
+ format!("{}://{}:{}", scheme, host, port)
+ }
+ }
+ }
+ }
+}
+
+/// Opaque identifier for URLs that have file or other schemes
+#[derive(Eq, PartialEq, Hash, Clone, Debug)]
+pub struct OpaqueOrigin(usize);
+
+#[cfg(feature = "heapsize")]
+known_heap_size!(0, OpaqueOrigin);
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/parser.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/parser.rs
new file mode 100644
index 000000000..ead0aa9cc
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/parser.rs
@@ -0,0 +1,1182 @@
+// Copyright 2013-2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::ascii::AsciiExt;
+use std::error::Error;
+use std::fmt::{self, Formatter, Write};
+use std::str;
+
+use Url;
+use encoding::EncodingOverride;
+use host::{Host, HostInternal};
+use percent_encoding::{
+ utf8_percent_encode, percent_encode,
+ SIMPLE_ENCODE_SET, DEFAULT_ENCODE_SET, USERINFO_ENCODE_SET, QUERY_ENCODE_SET,
+ PATH_SEGMENT_ENCODE_SET
+};
+
+pub type ParseResult<T> = Result<T, ParseError>;
+
+macro_rules! simple_enum_error {
+ ($($name: ident => $description: expr,)+) => {
+ /// Errors that can occur during parsing.
+ #[derive(PartialEq, Eq, Clone, Copy, Debug)]
+ pub enum ParseError {
+ $(
+ $name,
+ )+
+ }
+
+ impl Error for ParseError {
+ fn description(&self) -> &str {
+ match *self {
+ $(
+ ParseError::$name => $description,
+ )+
+ }
+ }
+ }
+ }
+}
+
+simple_enum_error! {
+ EmptyHost => "empty host",
+ IdnaError => "invalid international domain name",
+ InvalidPort => "invalid port number",
+ InvalidIpv4Address => "invalid IPv4 address",
+ InvalidIpv6Address => "invalid IPv6 address",
+ InvalidDomainCharacter => "invalid domain character",
+ RelativeUrlWithoutBase => "relative URL without a base",
+ RelativeUrlWithCannotBeABaseBase => "relative URL with a cannot-be-a-base base",
+ SetHostOnCannotBeABaseUrl => "a cannot-be-a-base URL doesn’t have a host to set",
+ Overflow => "URLs more than 4 GB are not supported",
+}
+
+#[cfg(feature = "heapsize")]
+known_heap_size!(0, ParseError);
+
+impl fmt::Display for ParseError {
+ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
+ self.description().fmt(fmt)
+ }
+}
+
+impl From<::idna::uts46::Errors> for ParseError {
+ fn from(_: ::idna::uts46::Errors) -> ParseError { ParseError::IdnaError }
+}
+
+#[derive(Copy, Clone)]
+pub enum SchemeType {
+ File,
+ SpecialNotFile,
+ NotSpecial,
+}
+
+impl SchemeType {
+ pub fn is_special(&self) -> bool {
+ !matches!(*self, SchemeType::NotSpecial)
+ }
+
+ pub fn is_file(&self) -> bool {
+ matches!(*self, SchemeType::File)
+ }
+
+ pub fn from(s: &str) -> Self {
+ match s {
+ "http" | "https" | "ws" | "wss" | "ftp" | "gopher" => SchemeType::SpecialNotFile,
+ "file" => SchemeType::File,
+ _ => SchemeType::NotSpecial,
+ }
+ }
+}
+
+pub fn default_port(scheme: &str) -> Option<u16> {
+ match scheme {
+ "http" | "ws" => Some(80),
+ "https" | "wss" => Some(443),
+ "ftp" => Some(21),
+ "gopher" => Some(70),
+ _ => None,
+ }
+}
+
+#[derive(Clone)]
+pub struct Input<'i> {
+ chars: str::Chars<'i>,
+}
+
+impl<'i> Input<'i> {
+ pub fn new(input: &'i str) -> Self {
+ Input::with_log(input, None)
+ }
+
+ pub fn with_log(original_input: &'i str, log_syntax_violation: Option<&Fn(&'static str)>)
+ -> Self {
+ let input = original_input.trim_matches(c0_control_or_space);
+ if let Some(log) = log_syntax_violation {
+ if input.len() < original_input.len() {
+ log("leading or trailing control or space character are ignored in URLs")
+ }
+ if input.chars().any(|c| matches!(c, '\t' | '\n' | '\r')) {
+ log("tabs or newlines are ignored in URLs")
+ }
+ }
+ Input { chars: input.chars() }
+ }
+
+ #[inline]
+ pub fn is_empty(&self) -> bool {
+ self.clone().next().is_none()
+ }
+
+ #[inline]
+ fn starts_with<P: Pattern>(&self, p: P) -> bool {
+ p.split_prefix(&mut self.clone())
+ }
+
+ #[inline]
+ pub fn split_prefix<P: Pattern>(&self, p: P) -> Option<Self> {
+ let mut remaining = self.clone();
+ if p.split_prefix(&mut remaining) {
+ Some(remaining)
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ fn split_first(&self) -> (Option<char>, Self) {
+ let mut remaining = self.clone();
+ (remaining.next(), remaining)
+ }
+
+ #[inline]
+ fn count_matching<F: Fn(char) -> bool>(&self, f: F) -> (u32, Self) {
+ let mut count = 0;
+ let mut remaining = self.clone();
+ loop {
+ let mut input = remaining.clone();
+ if matches!(input.next(), Some(c) if f(c)) {
+ remaining = input;
+ count += 1;
+ } else {
+ return (count, remaining)
+ }
+ }
+ }
+
+ #[inline]
+ fn next_utf8(&mut self) -> Option<(char, &'i str)> {
+ loop {
+ let utf8 = self.chars.as_str();
+ match self.chars.next() {
+ Some(c) => {
+ if
!matches!(c, '\t' | '\n' | '\r') {
+ return Some((c, &utf8[..c.len_utf8()]))
+ }
+ }
+ None => return None
+ }
+ }
+ }
+}
+
+pub trait Pattern {
+ fn split_prefix<'i>(self, input: &mut Input<'i>) -> bool;
+}
+
+impl Pattern for char {
+ fn split_prefix<'i>(self, input: &mut Input<'i>) -> bool { input.next() == Some(self) }
+}
+
+impl<'a> Pattern for &'a str {
+ fn split_prefix<'i>(self, input: &mut Input<'i>) -> bool {
+ for c in self.chars() {
+ if input.next() != Some(c) {
+ return false
+ }
+ }
+ true
+ }
+}
+
+impl<F: FnMut(char) -> bool> Pattern for F {
+ fn split_prefix<'i>(self, input: &mut Input<'i>) -> bool { input.next().map_or(false, self) }
+}
+
+impl<'i> Iterator for Input<'i> {
+ type Item = char;
+ fn next(&mut self) -> Option<char> {
+ self.chars.by_ref().find(|&c| !matches!(c, '\t' | '\n' | '\r'))
+ }
+}
+
+pub struct Parser<'a> {
+ pub serialization: String,
+ pub base_url: Option<&'a Url>,
+ pub query_encoding_override: EncodingOverride,
+ pub log_syntax_violation: Option<&'a Fn(&'static str)>,
+ pub context: Context,
+}
+
+#[derive(PartialEq, Eq, Copy, Clone)]
+pub enum Context {
+ UrlParser,
+ Setter,
+ PathSegmentSetter,
+}
+
+impl<'a> Parser<'a> {
+ pub fn for_setter(serialization: String) -> Parser<'a> {
+ Parser {
+ serialization: serialization,
+ base_url: None,
+ query_encoding_override: EncodingOverride::utf8(),
+ log_syntax_violation: None,
+ context: Context::Setter,
+ }
+ }
+
+ fn syntax_violation(&self, reason: &'static str) {
+ if let Some(log) = self.log_syntax_violation {
+ log(reason)
+ }
+ }
+
+ fn syntax_violation_if<F: Fn() -> bool>(&self, reason: &'static str, test: F) {
+ // Skip test if not logging.
+ if let Some(log) = self.log_syntax_violation {
+ if test() {
+ log(reason)
+ }
+ }
+ }
+
+ /// https://url.spec.whatwg.org/#concept-basic-url-parser
+ pub fn parse_url(mut self, input: &str) -> ParseResult<Url> {
+ let input = Input::with_log(input, self.log_syntax_violation);
+ if let Ok(remaining) = self.parse_scheme(input.clone()) {
+ return self.parse_with_scheme(remaining)
+ }
+
+ // No-scheme state
+ if let Some(base_url) = self.base_url {
+ if input.starts_with('#') {
+ self.fragment_only(base_url, input)
+ } else if base_url.cannot_be_a_base() {
+ Err(ParseError::RelativeUrlWithCannotBeABaseBase)
+ } else {
+ let scheme_type = SchemeType::from(base_url.scheme());
+ if scheme_type.is_file() {
+ self.parse_file(input, Some(base_url))
+ } else {
+ self.parse_relative(input, scheme_type, base_url)
+ }
+ }
+ } else {
+ Err(ParseError::RelativeUrlWithoutBase)
+ }
+ }
+
+ pub fn parse_scheme<'i>(&mut self, mut input: Input<'i>) -> Result<Input<'i>, ()> {
+ if input.is_empty() || !input.starts_with(ascii_alpha) {
+ return Err(())
+ }
+ debug_assert!(self.serialization.is_empty());
+ while let Some(c) = input.next() {
+ match c {
+ 'a'...'z' | 'A'...'Z' | '0'...'9' | '+' | '-' | '.'
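+ // (sketch) e.g. parsing "HTTP://example.com/" reaches this arm once per scheme
+ // character and stores the lowercased scheme "http".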
=> { + self.serialization.push(c.to_ascii_lowercase()) + } + ':' => return Ok(input), + _ => { + self.serialization.clear(); + return Err(()) + } + } + } + // EOF before ':' + if self.context == Context::Setter { + Ok(input) + } else { + self.serialization.clear(); + Err(()) + } + } + + fn parse_with_scheme(mut self, input: Input) -> ParseResult { + let scheme_end = to_u32(self.serialization.len())?; + let scheme_type = SchemeType::from(&self.serialization); + self.serialization.push(':'); + match scheme_type { + SchemeType::File => { + self.syntax_violation_if("expected // after file:", || !input.starts_with("//")); + let base_file_url = self.base_url.and_then(|base| { + if base.scheme() == "file" { Some(base) } else { None } + }); + self.serialization.clear(); + self.parse_file(input, base_file_url) + } + SchemeType::SpecialNotFile => { + // special relative or authority state + let (slashes_count, remaining) = input.count_matching(|c| matches!(c, '/' | '\\')); + if let Some(base_url) = self.base_url { + if slashes_count < 2 && + base_url.scheme() == &self.serialization[..scheme_end as usize] { + // "Cannot-be-a-base" URLs only happen with "not special" schemes. + debug_assert!(!base_url.cannot_be_a_base()); + self.serialization.clear(); + return self.parse_relative(input, scheme_type, base_url) + } + } + // special authority slashes state + self.syntax_violation_if("expected //", || { + input.clone().take_while(|&c| matches!(c, '/' | '\\')) + .collect::() != "//" + }); + self.after_double_slash(remaining, scheme_type, scheme_end) + } + SchemeType::NotSpecial => self.parse_non_special(input, scheme_type, scheme_end) + } + } + + /// Scheme other than file, http, https, ws, ws, ftp, gopher. + fn parse_non_special(mut self, input: Input, scheme_type: SchemeType, scheme_end: u32) + -> ParseResult { + // path or authority state ( + if let Some(input) = input.split_prefix("//") { + return self.after_double_slash(input, scheme_type, scheme_end) + } + // Anarchist URL (no authority) + let path_start = to_u32(self.serialization.len())?; + let username_end = path_start; + let host_start = path_start; + let host_end = path_start; + let host = HostInternal::None; + let port = None; + let remaining = if let Some(input) = input.split_prefix('/') { + let path_start = self.serialization.len(); + self.serialization.push('/'); + self.parse_path(scheme_type, &mut false, path_start, input) + } else { + self.parse_cannot_be_a_base_path(input) + }; + self.with_query_and_fragment(scheme_end, username_end, host_start, + host_end, host, port, path_start, remaining) + } + + fn parse_file(mut self, input: Input, mut base_file_url: Option<&Url>) -> ParseResult { + // file state + debug_assert!(self.serialization.is_empty()); + let (first_char, input_after_first_char) = input.split_first(); + match first_char { + None => { + if let Some(base_url) = base_file_url { + // Copy everything except the fragment + let before_fragment = match base_url.fragment_start { + Some(i) => &base_url.serialization[..i as usize], + None => &*base_url.serialization, + }; + self.serialization.push_str(before_fragment); + Ok(Url { + serialization: self.serialization, + fragment_start: None, + ..*base_url + }) + } else { + self.serialization.push_str("file:///"); + let scheme_end = "file".len() as u32; + let path_start = "file://".len() as u32; + Ok(Url { + serialization: self.serialization, + scheme_end: scheme_end, + username_end: path_start, + host_start: path_start, + host_end: path_start, + host: HostInternal::None, + port: None, + 
path_start: path_start, + query_start: None, + fragment_start: None, + }) + } + }, + Some('?') => { + if let Some(base_url) = base_file_url { + // Copy everything up to the query string + let before_query = match (base_url.query_start, base_url.fragment_start) { + (None, None) => &*base_url.serialization, + (Some(i), _) | + (None, Some(i)) => base_url.slice(..i) + }; + self.serialization.push_str(before_query); + let (query_start, fragment_start) = + self.parse_query_and_fragment(base_url.scheme_end, input)?; + Ok(Url { + serialization: self.serialization, + query_start: query_start, + fragment_start: fragment_start, + ..*base_url + }) + } else { + self.serialization.push_str("file:///"); + let scheme_end = "file".len() as u32; + let path_start = "file://".len() as u32; + let (query_start, fragment_start) = + self.parse_query_and_fragment(scheme_end, input)?; + Ok(Url { + serialization: self.serialization, + scheme_end: scheme_end, + username_end: path_start, + host_start: path_start, + host_end: path_start, + host: HostInternal::None, + port: None, + path_start: path_start, + query_start: query_start, + fragment_start: fragment_start, + }) + } + }, + Some('#') => { + if let Some(base_url) = base_file_url { + self.fragment_only(base_url, input) + } else { + self.serialization.push_str("file:///"); + let scheme_end = "file".len() as u32; + let path_start = "file://".len() as u32; + let fragment_start = "file:///".len() as u32; + self.parse_fragment(input_after_first_char); + Ok(Url { + serialization: self.serialization, + scheme_end: scheme_end, + username_end: path_start, + host_start: path_start, + host_end: path_start, + host: HostInternal::None, + port: None, + path_start: path_start, + query_start: None, + fragment_start: Some(fragment_start), + }) + } + } + Some('/') | Some('\\') => { + self.syntax_violation_if("backslash", || first_char == Some('\\')); + // file slash state + let (next_char, input_after_next_char) = input_after_first_char.split_first(); + self.syntax_violation_if("backslash", || next_char == Some('\\')); + if matches!(next_char, Some('/') | Some('\\')) { + // file host state + self.serialization.push_str("file://"); + let scheme_end = "file".len() as u32; + let host_start = "file://".len() as u32; + let (path_start, host, remaining) = + self.parse_file_host(input_after_next_char)?; + let host_end = to_u32(self.serialization.len())?; + let mut has_host = !matches!(host, HostInternal::None); + let remaining = if path_start { + self.parse_path_start(SchemeType::File, &mut has_host, remaining) + } else { + let path_start = self.serialization.len(); + self.serialization.push('/'); + self.parse_path(SchemeType::File, &mut has_host, path_start, remaining) + }; + // FIXME: deal with has_host + let (query_start, fragment_start) = + self.parse_query_and_fragment(scheme_end, remaining)?; + Ok(Url { + serialization: self.serialization, + scheme_end: scheme_end, + username_end: host_start, + host_start: host_start, + host_end: host_end, + host: host, + port: None, + path_start: host_end, + query_start: query_start, + fragment_start: fragment_start, + }) + } else { + self.serialization.push_str("file:///"); + let scheme_end = "file".len() as u32; + let path_start = "file://".len(); + if let Some(base_url) = base_file_url { + let first_segment = base_url.path_segments().unwrap().next().unwrap(); + // FIXME: *normalized* drive letter + if is_windows_drive_letter(first_segment) { + self.serialization.push_str(first_segment); + self.serialization.push('/'); + } + } + let remaining = 
self.parse_path( + SchemeType::File, &mut false, path_start, input_after_first_char); + let (query_start, fragment_start) = + self.parse_query_and_fragment(scheme_end, remaining)?; + let path_start = path_start as u32; + Ok(Url { + serialization: self.serialization, + scheme_end: scheme_end, + username_end: path_start, + host_start: path_start, + host_end: path_start, + host: HostInternal::None, + port: None, + path_start: path_start, + query_start: query_start, + fragment_start: fragment_start, + }) + } + } + _ => { + if starts_with_windows_drive_letter_segment(&input) { + base_file_url = None; + } + if let Some(base_url) = base_file_url { + let before_query = match (base_url.query_start, base_url.fragment_start) { + (None, None) => &*base_url.serialization, + (Some(i), _) | + (None, Some(i)) => base_url.slice(..i) + }; + self.serialization.push_str(before_query); + self.pop_path(SchemeType::File, base_url.path_start as usize); + let remaining = self.parse_path( + SchemeType::File, &mut true, base_url.path_start as usize, input); + self.with_query_and_fragment( + base_url.scheme_end, base_url.username_end, base_url.host_start, + base_url.host_end, base_url.host, base_url.port, base_url.path_start, remaining) + } else { + self.serialization.push_str("file:///"); + let scheme_end = "file".len() as u32; + let path_start = "file://".len(); + let remaining = self.parse_path( + SchemeType::File, &mut false, path_start, input); + let (query_start, fragment_start) = + self.parse_query_and_fragment(scheme_end, remaining)?; + let path_start = path_start as u32; + Ok(Url { + serialization: self.serialization, + scheme_end: scheme_end, + username_end: path_start, + host_start: path_start, + host_end: path_start, + host: HostInternal::None, + port: None, + path_start: path_start, + query_start: query_start, + fragment_start: fragment_start, + }) + } + } + } + } + + fn parse_relative(mut self, input: Input, scheme_type: SchemeType, base_url: &Url) + -> ParseResult { + // relative state + debug_assert!(self.serialization.is_empty()); + let (first_char, input_after_first_char) = input.split_first(); + match first_char { + None => { + // Copy everything except the fragment + let before_fragment = match base_url.fragment_start { + Some(i) => &base_url.serialization[..i as usize], + None => &*base_url.serialization, + }; + self.serialization.push_str(before_fragment); + Ok(Url { + serialization: self.serialization, + fragment_start: None, + ..*base_url + }) + }, + Some('?') => { + // Copy everything up to the query string + let before_query = match (base_url.query_start, base_url.fragment_start) { + (None, None) => &*base_url.serialization, + (Some(i), _) | + (None, Some(i)) => base_url.slice(..i) + }; + self.serialization.push_str(before_query); + let (query_start, fragment_start) = + self.parse_query_and_fragment(base_url.scheme_end, input)?; + Ok(Url { + serialization: self.serialization, + query_start: query_start, + fragment_start: fragment_start, + ..*base_url + }) + }, + Some('#') => self.fragment_only(base_url, input), + Some('/') | Some('\\') => { + let (slashes_count, remaining) = input.count_matching(|c| matches!(c, '/' | '\\')); + if slashes_count >= 2 { + self.syntax_violation_if("expected //", || { + input.clone().take_while(|&c| matches!(c, '/' | '\\')) + .collect::() != "//" + }); + let scheme_end = base_url.scheme_end; + debug_assert!(base_url.byte_at(scheme_end) == b':'); + self.serialization.push_str(base_url.slice(..scheme_end + 1)); + return self.after_double_slash(remaining, 
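+ // (sketch) this is the protocol-relative case: e.g. joining "//other.example/x"
+ // against the base "https://example.com/" yields "https://other.example/x".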
scheme_type, scheme_end)
+ }
+ let path_start = base_url.path_start;
+ debug_assert!(base_url.byte_at(path_start) == b'/');
+ self.serialization.push_str(base_url.slice(..path_start + 1));
+ let remaining = self.parse_path(
+ scheme_type, &mut true, path_start as usize, input_after_first_char);
+ self.with_query_and_fragment(
+ base_url.scheme_end, base_url.username_end, base_url.host_start,
+ base_url.host_end, base_url.host, base_url.port, base_url.path_start, remaining)
+ }
+ _ => {
+ let before_query = match (base_url.query_start, base_url.fragment_start) {
+ (None, None) => &*base_url.serialization,
+ (Some(i), _) |
+ (None, Some(i)) => base_url.slice(..i)
+ };
+ self.serialization.push_str(before_query);
+ // FIXME spec says just "remove last entry", not the "pop" algorithm
+ self.pop_path(scheme_type, base_url.path_start as usize);
+ let remaining = self.parse_path(
+ scheme_type, &mut true, base_url.path_start as usize, input);
+ self.with_query_and_fragment(
+ base_url.scheme_end, base_url.username_end, base_url.host_start,
+ base_url.host_end, base_url.host, base_url.port, base_url.path_start, remaining)
+ }
+ }
+ }
+
+ fn after_double_slash(mut self, input: Input, scheme_type: SchemeType, scheme_end: u32)
+ -> ParseResult<Url> {
+ self.serialization.push('/');
+ self.serialization.push('/');
+ // authority state
+ let (username_end, remaining) = self.parse_userinfo(input, scheme_type)?;
+ // host state
+ let host_start = to_u32(self.serialization.len())?;
+ let (host_end, host, port, remaining) =
+ self.parse_host_and_port(remaining, scheme_end, scheme_type)?;
+ // path state
+ let path_start = to_u32(self.serialization.len())?;
+ let remaining = self.parse_path_start(
+ scheme_type, &mut true, remaining);
+ self.with_query_and_fragment(scheme_end, username_end, host_start,
+ host_end, host, port, path_start, remaining)
+ }
+
+ /// Return (username_end, remaining)
+ fn parse_userinfo<'i>(&mut self, mut input: Input<'i>, scheme_type: SchemeType)
+ -> ParseResult<(u32, Input<'i>)> {
+ let mut last_at = None;
+ let mut remaining = input.clone();
+ let mut char_count = 0;
+ while let Some(c) = remaining.next() {
+ match c {
+ '@' => {
+ if last_at.is_some() {
+ self.syntax_violation("unencoded @ sign in username or password")
+ } else {
+ self.syntax_violation(
+ "embedding authentication information (username or password) \
+ in an URL is not recommended")
+ }
+ last_at = Some((char_count, remaining.clone()))
+ },
+ '/' | '?'
| '#' => break,
+ '\\' if scheme_type.is_special() => break,
+ _ => (),
+ }
+ char_count += 1;
+ }
+ let (mut userinfo_char_count, remaining) = match last_at {
+ None => return Ok((to_u32(self.serialization.len())?, input)),
+ Some((0, remaining)) => return Ok((to_u32(self.serialization.len())?, remaining)),
+ Some(x) => x
+ };
+
+ let mut username_end = None;
+ while userinfo_char_count > 0 {
+ let (c, utf8_c) = input.next_utf8().unwrap();
+ userinfo_char_count -= 1;
+ if c == ':' && username_end.is_none() {
+ // Start parsing password
+ username_end = Some(to_u32(self.serialization.len())?);
+ self.serialization.push(':');
+ } else {
+ self.check_url_code_point(c, &input);
+ self.serialization.extend(utf8_percent_encode(utf8_c, USERINFO_ENCODE_SET));
+ }
+ }
+ let username_end = match username_end {
+ Some(i) => i,
+ None => to_u32(self.serialization.len())?,
+ };
+ self.serialization.push('@');
+ Ok((username_end, remaining))
+ }
+
+ fn parse_host_and_port<'i>(&mut self, input: Input<'i>,
+ scheme_end: u32, scheme_type: SchemeType)
+ -> ParseResult<(u32, HostInternal, Option<u16>, Input<'i>)> {
+ let (host, remaining) = Parser::parse_host(input, scheme_type)?;
+ write!(&mut self.serialization, "{}", host).unwrap();
+ let host_end = to_u32(self.serialization.len())?;
+ let (port, remaining) = if let Some(remaining) = remaining.split_prefix(':') {
+ let scheme = || default_port(&self.serialization[..scheme_end as usize]);
+ Parser::parse_port(remaining, scheme, self.context)?
+ } else {
+ (None, remaining)
+ };
+ if let Some(port) = port {
+ write!(&mut self.serialization, ":{}", port).unwrap()
+ }
+ Ok((host_end, host.into(), port, remaining))
+ }
+
+ pub fn parse_host(mut input: Input, scheme_type: SchemeType)
+ -> ParseResult<(Host<String>, Input)> {
+ // Undo the Input abstraction here to avoid allocating in the common case
+ // where the host part of the input does not contain any tab or newline
+ let input_str = input.chars.as_str();
+ let mut inside_square_brackets = false;
+ let mut has_ignored_chars = false;
+ let mut non_ignored_chars = 0;
+ let mut bytes = 0;
+ for c in input_str.chars() {
+ match c {
+ ':' if !inside_square_brackets => break,
+ '\\' if scheme_type.is_special() => break,
+ '/' | '?' | '#' => break,
+ '\t' | '\n' | '\r' => {
+ has_ignored_chars = true;
+ }
+ '[' => {
+ inside_square_brackets = true;
+ non_ignored_chars += 1
+ }
+ ']' => {
+ inside_square_brackets = false;
+ non_ignored_chars += 1
+ }
+ _ => non_ignored_chars += 1
+ }
+ bytes += c.len_utf8();
+ }
+ let replaced: String;
+ let host_str;
+ {
+ let host_input = input.by_ref().take(non_ignored_chars);
+ if has_ignored_chars {
+ replaced = host_input.collect();
+ host_str = &*replaced
+ } else {
+ for _ in host_input {}
+ host_str = &input_str[..bytes]
+ }
+ }
+ if scheme_type.is_special() && host_str.is_empty() {
+ return Err(ParseError::EmptyHost)
+ }
+ let host = Host::parse(host_str)?;
+ Ok((host, input))
+ }
+
+ pub fn parse_file_host<'i>(&mut self, input: Input<'i>)
+ -> ParseResult<(bool, HostInternal, Input<'i>)> {
+ // Undo the Input abstraction here to avoid allocating in the common case
+ // where the host part of the input does not contain any tab or newline
+ let input_str = input.chars.as_str();
+ let mut has_ignored_chars = false;
+ let mut non_ignored_chars = 0;
+ let mut bytes = 0;
+ for c in input_str.chars() {
+ match c {
+ '/' | '\\' | '?'
| '#' => break, + '\t' | '\n' | '\r' => has_ignored_chars = true, + _ => non_ignored_chars += 1, + } + bytes += c.len_utf8(); + } + let replaced: String; + let host_str; + let mut remaining = input.clone(); + { + let host_input = remaining.by_ref().take(non_ignored_chars); + if has_ignored_chars { + replaced = host_input.collect(); + host_str = &*replaced + } else { + for _ in host_input {} + host_str = &input_str[..bytes] + } + } + if is_windows_drive_letter(host_str) { + return Ok((false, HostInternal::None, input)) + } + let host = if host_str.is_empty() { + HostInternal::None + } else { + match Host::parse(host_str)? { + Host::Domain(ref d) if d == "localhost" => HostInternal::None, + host => { + write!(&mut self.serialization, "{}", host).unwrap(); + host.into() + } + } + }; + Ok((true, host, remaining)) + } + + pub fn parse_port

<P>(mut input: Input, default_port: P,
+ context: Context)
+ -> ParseResult<(Option<u16>, Input)>
+ where P: Fn() -> Option<u16> {
+ let mut port: u32 = 0;
+ let mut has_any_digit = false;
+ while let (Some(c), remaining) = input.split_first() {
+ if let Some(digit) = c.to_digit(10) {
+ port = port * 10 + digit;
+ if port > ::std::u16::MAX as u32 {
+ return Err(ParseError::InvalidPort)
+ }
+ has_any_digit = true;
+ } else if context == Context::UrlParser && !matches!(c, '/' | '\\' | '?' | '#') {
+ return Err(ParseError::InvalidPort)
+ } else {
+ break
+ }
+ input = remaining;
+ }
+ let mut opt_port = Some(port as u16);
+ if !has_any_digit || opt_port == default_port() {
+ opt_port = None;
+ }
+ Ok((opt_port, input))
+ }
+
+ pub fn parse_path_start<'i>(&mut self, scheme_type: SchemeType, has_host: &mut bool,
+ mut input: Input<'i>)
+ -> Input<'i> {
+ // Path start state
+ match input.split_first() {
+ (Some('/'), remaining) => input = remaining,
+ (Some('\\'), remaining) => if scheme_type.is_special() {
+ self.syntax_violation("backslash");
+ input = remaining
+ },
+ _ => {}
+ }
+ let path_start = self.serialization.len();
+ self.serialization.push('/');
+ self.parse_path(scheme_type, has_host, path_start, input)
+ }
+
+ pub fn parse_path<'i>(&mut self, scheme_type: SchemeType, has_host: &mut bool,
+ path_start: usize, mut input: Input<'i>)
+ -> Input<'i> {
+ // Relative path state
+ debug_assert!(self.serialization.ends_with('/'));
+ loop {
+ let segment_start = self.serialization.len();
+ let mut ends_with_slash = false;
+ loop {
+ let input_before_c = input.clone();
+ let (c, utf8_c) = if let Some(x) = input.next_utf8() { x } else { break };
+ match c {
+ '/' if self.context != Context::PathSegmentSetter => {
+ ends_with_slash = true;
+ break
+ },
+ '\\' if self.context != Context::PathSegmentSetter &&
+ scheme_type.is_special() => {
+ self.syntax_violation("backslash");
+ ends_with_slash = true;
+ break
+ },
+ '?' | '#' if self.context == Context::UrlParser => {
+ input = input_before_c;
+ break
+ },
+ _ => {
+ self.check_url_code_point(c, &input);
+ if c == '%' {
+ let after_percent_sign = input.clone();
+ if matches!(input.next(), Some('2')) &&
+ matches!(input.next(), Some('E') | Some('e')) {
+ self.serialization.push('.');
+ continue
+ }
+ input = after_percent_sign
+ }
+ if self.context == Context::PathSegmentSetter {
+ self.serialization.extend(utf8_percent_encode(
+ utf8_c, PATH_SEGMENT_ENCODE_SET));
+ } else {
+ self.serialization.extend(utf8_percent_encode(
+ utf8_c, DEFAULT_ENCODE_SET));
+ }
+ }
+ }
+ }
+ match &self.serialization[segment_start..] {
+ ".." => {
+ debug_assert!(self.serialization.as_bytes()[segment_start - 1] == b'/');
+ self.serialization.truncate(segment_start - 1); // Truncate "/.."
+ self.pop_path(scheme_type, path_start);
+ if !self.serialization[path_start..].ends_with('/') {
+ self.serialization.push('/')
+ }
+ },
+ "." => {
+ self.serialization.truncate(segment_start);
+ },
+ _ => {
+ if scheme_type.is_file() && is_windows_drive_letter(
+ &self.serialization[path_start + 1..]
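+ // (sketch) e.g. for "file:///C|/dir" the first path segment "C|" is treated as
+ // a Windows drive letter and normalized to "C:" just below.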
+ ) { + if self.serialization.ends_with('|') { + self.serialization.pop(); + self.serialization.push(':'); + } + if *has_host { + self.syntax_violation("file: with host and Windows drive letter"); + *has_host = false; // FIXME account for this in callers + } + } + if ends_with_slash { + self.serialization.push('/') + } + } + } + if !ends_with_slash { + break + } + } + input + } + + /// https://url.spec.whatwg.org/#pop-a-urls-path + fn pop_path(&mut self, scheme_type: SchemeType, path_start: usize) { + if self.serialization.len() > path_start { + let slash_position = self.serialization[path_start..].rfind('/').unwrap(); + // + 1 since rfind returns the position before the slash. + let segment_start = path_start + slash_position + 1; + // Don’t pop a Windows drive letter + // FIXME: *normalized* Windows drive letter + if !( + scheme_type.is_file() && + is_windows_drive_letter(&self.serialization[segment_start..]) + ) { + self.serialization.truncate(segment_start); + } + } + + } + + pub fn parse_cannot_be_a_base_path<'i>(&mut self, mut input: Input<'i>) -> Input<'i> { + loop { + let input_before_c = input.clone(); + match input.next_utf8() { + Some(('?', _)) | Some(('#', _)) if self.context == Context::UrlParser => { + return input_before_c + } + Some((c, utf8_c)) => { + self.check_url_code_point(c, &input); + self.serialization.extend(utf8_percent_encode( + utf8_c, SIMPLE_ENCODE_SET)); + } + None => return input + } + } + } + + fn with_query_and_fragment(mut self, scheme_end: u32, username_end: u32, + host_start: u32, host_end: u32, host: HostInternal, + port: Option, path_start: u32, remaining: Input) + -> ParseResult { + let (query_start, fragment_start) = + self.parse_query_and_fragment(scheme_end, remaining)?; + Ok(Url { + serialization: self.serialization, + scheme_end: scheme_end, + username_end: username_end, + host_start: host_start, + host_end: host_end, + host: host, + port: port, + path_start: path_start, + query_start: query_start, + fragment_start: fragment_start + }) + } + + /// Return (query_start, fragment_start) + fn parse_query_and_fragment(&mut self, scheme_end: u32, mut input: Input) + -> ParseResult<(Option, Option)> { + let mut query_start = None; + match input.next() { + Some('#') => {} + Some('?') => { + query_start = Some(to_u32(self.serialization.len())?); + self.serialization.push('?'); + let remaining = self.parse_query(scheme_end, input); + if let Some(remaining) = remaining { + input = remaining + } else { + return Ok((query_start, None)) + } + } + None => return Ok((None, None)), + _ => panic!("Programming error. parse_query_and_fragment() called without ? 
or #") + } + + let fragment_start = to_u32(self.serialization.len())?; + self.serialization.push('#'); + self.parse_fragment(input); + Ok((query_start, Some(fragment_start))) + } + + pub fn parse_query<'i>(&mut self, scheme_end: u32, mut input: Input<'i>) + -> Option> { + let mut query = String::new(); // FIXME: use a streaming decoder instead + let mut remaining = None; + while let Some(c) = input.next() { + if c == '#' && self.context == Context::UrlParser { + remaining = Some(input); + break + } else { + self.check_url_code_point(c, &input); + query.push(c); + } + } + + let encoding = match &self.serialization[..scheme_end as usize] { + "http" | "https" | "file" | "ftp" | "gopher" => self.query_encoding_override, + _ => EncodingOverride::utf8(), + }; + let query_bytes = encoding.encode(query.into()); + self.serialization.extend(percent_encode(&query_bytes, QUERY_ENCODE_SET)); + remaining + } + + fn fragment_only(mut self, base_url: &Url, mut input: Input) -> ParseResult { + let before_fragment = match base_url.fragment_start { + Some(i) => base_url.slice(..i), + None => &*base_url.serialization, + }; + debug_assert!(self.serialization.is_empty()); + self.serialization.reserve(before_fragment.len() + input.chars.as_str().len()); + self.serialization.push_str(before_fragment); + self.serialization.push('#'); + let next = input.next(); + debug_assert!(next == Some('#')); + self.parse_fragment(input); + Ok(Url { + serialization: self.serialization, + fragment_start: Some(to_u32(before_fragment.len())?), + ..*base_url + }) + } + + pub fn parse_fragment(&mut self, mut input: Input) { + while let Some((c, utf8_c)) = input.next_utf8() { + if c == '\0' { + self.syntax_violation("NULL characters are ignored in URL fragment identifiers") + } else { + self.check_url_code_point(c, &input); + self.serialization.extend(utf8_percent_encode(utf8_c, + SIMPLE_ENCODE_SET)); + } + } + } + + fn check_url_code_point(&self, c: char, input: &Input) { + if let Some(log) = self.log_syntax_violation { + if c == '%' { + let mut input = input.clone(); + if !matches!((input.next(), input.next()), (Some(a), Some(b)) + if is_ascii_hex_digit(a) && is_ascii_hex_digit(b)) { + log("expected 2 hex digits after %") + } + } else if !is_url_code_point(c) { + log("non-URL code point") + } + } + } +} + +#[inline] +fn is_ascii_hex_digit(c: char) -> bool { + matches!(c, 'a'...'f' | 'A'...'F' | '0'...'9') +} + +// Non URL code points: +// U+0000 to U+0020 (space) +// " # % < > [ \ ] ^ ` { | } +// U+007F to U+009F +// surrogates +// U+FDD0 to U+FDEF +// Last two of each plane: U+__FFFE to U+__FFFF for __ in 00 to 10 hex +#[inline] +fn is_url_code_point(c: char) -> bool { + matches!(c, + 'a'...'z' | + 'A'...'Z' | + '0'...'9' | + '!' | '$' | '&' | '\'' | '(' | ')' | '*' | '+' | ',' | '-' | + '.' | '/' | ':' | ';' | '=' | '?' 
| '@' | '_' | '~' |
+ '\u{A0}'...'\u{D7FF}' | '\u{E000}'...'\u{FDCF}' | '\u{FDF0}'...'\u{FFFD}' |
+ '\u{10000}'...'\u{1FFFD}' | '\u{20000}'...'\u{2FFFD}' |
+ '\u{30000}'...'\u{3FFFD}' | '\u{40000}'...'\u{4FFFD}' |
+ '\u{50000}'...'\u{5FFFD}' | '\u{60000}'...'\u{6FFFD}' |
+ '\u{70000}'...'\u{7FFFD}' | '\u{80000}'...'\u{8FFFD}' |
+ '\u{90000}'...'\u{9FFFD}' | '\u{A0000}'...'\u{AFFFD}' |
+ '\u{B0000}'...'\u{BFFFD}' | '\u{C0000}'...'\u{CFFFD}' |
+ '\u{D0000}'...'\u{DFFFD}' | '\u{E1000}'...'\u{EFFFD}' |
+ '\u{F0000}'...'\u{FFFFD}' | '\u{100000}'...'\u{10FFFD}')
+}
+
+/// https://url.spec.whatwg.org/#c0-controls-and-space
+#[inline]
+fn c0_control_or_space(ch: char) -> bool {
+ ch <= ' ' // U+0000 to U+0020
+}
+
+/// https://url.spec.whatwg.org/#ascii-alpha
+#[inline]
+pub fn ascii_alpha(ch: char) -> bool {
+ matches!(ch, 'a'...'z' | 'A'...'Z')
+}
+
+#[inline]
+pub fn to_u32(i: usize) -> ParseResult<u32> {
+ if i <= ::std::u32::MAX as usize {
+ Ok(i as u32)
+ } else {
+ Err(ParseError::Overflow)
+ }
+}
+
+/// Whether the scheme is file:, the path has a single segment, and that segment
+/// is a Windows drive letter
+fn is_windows_drive_letter(segment: &str) -> bool {
+ segment.len() == 2
+ && starts_with_windows_drive_letter(segment)
+}
+
+fn starts_with_windows_drive_letter(s: &str) -> bool {
+ ascii_alpha(s.as_bytes()[0] as char)
+ && matches!(s.as_bytes()[1], b':' | b'|')
+}
+
+fn starts_with_windows_drive_letter_segment(input: &Input) -> bool {
+ let mut input = input.clone();
+ matches!((input.next(), input.next(), input.next()), (Some(a), Some(b), Some(c))
+ if ascii_alpha(a) && matches!(b, ':' | '|') && matches!(c, '/' | '\\' | '?' | '#'))
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/path_segments.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/path_segments.rs
new file mode 100644
index 000000000..f5b7d51f9
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/path_segments.rs
@@ -0,0 +1,217 @@
+// Copyright 2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use parser::{self, SchemeType, to_u32};
+use std::str;
+use Url;
+
+/// Exposes methods to manipulate the path of an URL that is not cannot-be-base.
+///
+/// The path always starts with a `/` slash, and is made of slash-separated segments.
+/// There is always at least one segment (which may be the empty string).
+///
+/// Examples:
+///
+/// ```rust
+/// use url::Url;
+/// # use std::error::Error;
+///
+/// # fn run() -> Result<(), Box<Error>> {
+/// let mut url = Url::parse("mailto:me@example.com")?;
+/// assert!(url.path_segments_mut().is_err());
+///
+/// let mut url = Url::parse("http://example.net/foo/index.html")?;
+/// url.path_segments_mut().map_err(|_| "cannot be base")?
+/// .pop().push("img").push("2/100%.png");
+/// assert_eq!(url.as_str(), "http://example.net/foo/img/2%2F100%25.png");
+/// # Ok(())
+/// # }
+/// # run().unwrap();
+/// ```
+#[derive(Debug)]
+pub struct PathSegmentsMut<'a> {
+ url: &'a mut Url,
+ after_first_slash: usize,
+ after_path: String,
+ old_after_path_position: u32,
+}
+
+// Not re-exported outside the crate
+pub fn new(url: &mut Url) -> PathSegmentsMut {
+ let after_path = url.take_after_path();
+ let old_after_path_position = to_u32(url.serialization.len()).unwrap();
+ debug_assert!(url.byte_at(url.path_start) == b'/');
+ PathSegmentsMut {
+ after_first_slash: url.path_start as usize + "/".len(),
+ url: url,
+ old_after_path_position: old_after_path_position,
+ after_path: after_path,
+ }
+}
+
+impl<'a> Drop for PathSegmentsMut<'a> {
+ fn drop(&mut self) {
+ self.url.restore_after_path(self.old_after_path_position, &self.after_path)
+ }
+}
+
+impl<'a> PathSegmentsMut<'a> {
+ /// Remove all segments in the path, leaving the minimal `url.path() == "/"`.
+ ///
+ /// Returns `&mut Self` so that method calls can be chained.
+ ///
+ /// Example:
+ ///
+ /// ```rust
+ /// use url::Url;
+ /// # use std::error::Error;
+ ///
+ /// # fn run() -> Result<(), Box<Error>> {
+ /// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
+ /// url.path_segments_mut().map_err(|_| "cannot be base")?
+ /// .clear().push("logout");
+ /// assert_eq!(url.as_str(), "https://github.com/logout");
+ /// # Ok(())
+ /// # }
+ /// # run().unwrap();
+ /// ```
+ pub fn clear(&mut self) -> &mut Self {
+ self.url.serialization.truncate(self.after_first_slash);
+ self
+ }
+
+ /// Remove the last segment of this URL’s path if it is empty,
+ /// except if there was only one segment to begin with.
+ ///
+ /// In other words, remove one path trailing slash, if any,
+ /// unless it is also the initial slash (so this does nothing if `url.path() == "/"`).
+ ///
+ /// Returns `&mut Self` so that method calls can be chained.
+ ///
+ /// Example:
+ ///
+ /// ```rust
+ /// use url::Url;
+ /// # use std::error::Error;
+ ///
+ /// # fn run() -> Result<(), Box<Error>> {
+ /// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
+ /// url.path_segments_mut().map_err(|_| "cannot be base")?
+ /// .push("pulls");
+ /// assert_eq!(url.as_str(), "https://github.com/servo/rust-url//pulls");
+ ///
+ /// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
+ /// url.path_segments_mut().map_err(|_| "cannot be base")?
+ /// .pop_if_empty().push("pulls");
+ /// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/pulls");
+ /// # Ok(())
+ /// # }
+ /// # run().unwrap();
+ /// ```
+ pub fn pop_if_empty(&mut self) -> &mut Self {
+ if self.url.serialization[self.after_first_slash..].ends_with('/') {
+ self.url.serialization.pop();
+ }
+ self
+ }
+
+ /// Remove the last segment of this URL’s path.
+ ///
+ /// If the path only has one segment, make it empty such that `url.path() == "/"`.
+ ///
+ /// Returns `&mut Self` so that method calls can be chained.
+ pub fn pop(&mut self) -> &mut Self {
+ let last_slash = self.url.serialization[self.after_first_slash..].rfind('/').unwrap_or(0);
+ self.url.serialization.truncate(self.after_first_slash + last_slash);
+ self
+ }
+
+ /// Append the given segment at the end of this URL’s path.
+ ///
+ /// See the documentation for `.extend()`.
+ ///
+ /// Returns `&mut Self` so that method calls can be chained.
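+ ///
+ /// A small sketch:
+ ///
+ /// ```rust
+ /// use url::Url;
+ /// # use std::error::Error;
+ ///
+ /// # fn run() -> Result<(), Box<Error>> {
+ /// let mut url = Url::parse("https://example.net")?;
+ /// url.path_segments_mut().map_err(|_| "cannot be base")?
+ /// .push("api").push("v1");
+ /// assert_eq!(url.as_str(), "https://example.net/api/v1");
+ /// # Ok(())
+ /// # }
+ /// # run().unwrap();
+ /// ```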
+    pub fn push(&mut self, segment: &str) -> &mut Self {
+        self.extend(Some(segment))
+    }
+
+    /// Append each segment from the given iterator at the end of this URL’s path.
+    ///
+    /// Each segment is percent-encoded like in `Url::parse` or `Url::join`,
+    /// except that `%` and `/` characters are also encoded (to `%25` and `%2F`).
+    /// This is unlike `Url::parse` where `%` is left as-is in case some of the input
+    /// is already percent-encoded, and `/` denotes a path segment separator.
+    ///
+    /// Note that, in addition to slashes between new segments,
+    /// this always adds a slash between the existing path and the new segments
+    /// *except* if the existing path is `"/"`.
+    /// If the previous last segment was empty (if the path had a trailing slash)
+    /// the path after `.extend()` will contain two consecutive slashes.
+    /// If that is undesired, call `.pop_if_empty()` first.
+    ///
+    /// To obtain a behavior similar to `Url::join`, call `.pop()` unconditionally first.
+    ///
+    /// Returns `&mut Self` so that method calls can be chained.
+    ///
+    /// Example:
+    ///
+    /// ```rust
+    /// use url::Url;
+    /// # use std::error::Error;
+    ///
+    /// # fn run() -> Result<(), Box<Error>> {
+    /// let mut url = Url::parse("https://github.com/")?;
+    /// let org = "servo";
+    /// let repo = "rust-url";
+    /// let issue_number = "188";
+    /// url.path_segments_mut().map_err(|_| "cannot be base")?
+    ///     .extend(&[org, repo, "issues", issue_number]);
+    /// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/issues/188");
+    /// # Ok(())
+    /// # }
+    /// # run().unwrap();
+    /// ```
+    ///
+    /// In order to make sure that parsing the serialization of an URL gives the same URL,
+    /// a segment is ignored if it is `"."` or `".."`:
+    ///
+    /// ```rust
+    /// use url::Url;
+    /// # use std::error::Error;
+    ///
+    /// # fn run() -> Result<(), Box<Error>> {
+    /// let mut url = Url::parse("https://github.com/servo")?;
+    /// url.path_segments_mut().map_err(|_| "cannot be base")?
+    ///     .extend(&["..", "rust-url", ".", "pulls"]);
+    /// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/pulls");
+    /// # Ok(())
+    /// # }
+    /// # run().unwrap();
+    /// ```
+    pub fn extend<I>(&mut self, segments: I) -> &mut Self
+    where I: IntoIterator, I::Item: AsRef<str> {
+        let scheme_type = SchemeType::from(self.url.scheme());
+        let path_start = self.url.path_start as usize;
+        self.url.mutate(|parser| {
+            parser.context = parser::Context::PathSegmentSetter;
+            for segment in segments {
+                let segment = segment.as_ref();
+                if matches!(segment, "." | "..") {
+                    continue
+                }
+                if parser.serialization.len() > path_start + 1 {
+                    parser.serialization.push('/');
+                }
+                let mut has_host = true;  // FIXME account for this?
+                parser.parse_path(scheme_type, &mut has_host, path_start,
+                                  parser::Input::new(segment));
+            }
+        });
+        self
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/quirks.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/quirks.rs
new file mode 100644
index 000000000..1bc88303c
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/quirks.rs
@@ -0,0 +1,217 @@
+// Copyright 2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Getters and setters for URL components implemented per https://url.spec.whatwg.org/#api
+//!
+//!
Unless you need to be interoperable with web browsers, +//! you probably want to use `Url` method instead. + +use {Url, Position, Host, ParseError, idna}; +use parser::{Parser, SchemeType, default_port, Context, Input}; + +/// https://url.spec.whatwg.org/#dom-url-domaintoascii +pub fn domain_to_ascii(domain: &str) -> String { + match Host::parse(domain) { + Ok(Host::Domain(domain)) => domain, + _ => String::new(), + } +} + +/// https://url.spec.whatwg.org/#dom-url-domaintounicode +pub fn domain_to_unicode(domain: &str) -> String { + match Host::parse(domain) { + Ok(Host::Domain(ref domain)) => { + let (unicode, _errors) = idna::domain_to_unicode(domain); + unicode + } + _ => String::new(), + } +} + +/// Getter for https://url.spec.whatwg.org/#dom-url-href +pub fn href(url: &Url) -> &str { + url.as_str() +} + +/// Setter for https://url.spec.whatwg.org/#dom-url-href +pub fn set_href(url: &mut Url, value: &str) -> Result<(), ParseError> { + *url = Url::parse(value)?; + Ok(()) +} + +/// Getter for https://url.spec.whatwg.org/#dom-url-origin +pub fn origin(url: &Url) -> String { + url.origin().unicode_serialization() +} + +/// Getter for https://url.spec.whatwg.org/#dom-url-protocol +#[inline] +pub fn protocol(url: &Url) -> &str { + &url.as_str()[..url.scheme().len() + ":".len()] +} + +/// Setter for https://url.spec.whatwg.org/#dom-url-protocol +pub fn set_protocol(url: &mut Url, mut new_protocol: &str) -> Result<(), ()> { + // The scheme state in the spec ignores everything after the first `:`, + // but `set_scheme` errors if there is more. + if let Some(position) = new_protocol.find(':') { + new_protocol = &new_protocol[..position]; + } + url.set_scheme(new_protocol) +} + +/// Getter for https://url.spec.whatwg.org/#dom-url-username +#[inline] +pub fn username(url: &Url) -> &str { + url.username() +} + +/// Setter for https://url.spec.whatwg.org/#dom-url-username +pub fn set_username(url: &mut Url, new_username: &str) -> Result<(), ()> { + url.set_username(new_username) +} + +/// Getter for https://url.spec.whatwg.org/#dom-url-password +#[inline] +pub fn password(url: &Url) -> &str { + url.password().unwrap_or("") +} + +/// Setter for https://url.spec.whatwg.org/#dom-url-password +pub fn set_password(url: &mut Url, new_password: &str) -> Result<(), ()> { + url.set_password(if new_password.is_empty() { None } else { Some(new_password) }) +} + +/// Getter for https://url.spec.whatwg.org/#dom-url-host +#[inline] +pub fn host(url: &Url) -> &str { + &url[Position::BeforeHost..Position::AfterPort] +} + +/// Setter for https://url.spec.whatwg.org/#dom-url-host +pub fn set_host(url: &mut Url, new_host: &str) -> Result<(), ()> { + if url.cannot_be_a_base() { + return Err(()) + } + let host; + let opt_port; + { + let scheme = url.scheme(); + let result = Parser::parse_host(Input::new(new_host), SchemeType::from(scheme)); + match result { + Ok((h, remaining)) => { + host = h; + opt_port = if let Some(remaining) = remaining.split_prefix(':') { + Parser::parse_port(remaining, || default_port(scheme), Context::Setter) + .ok().map(|(port, _remaining)| port) + } else { + None + }; + } + Err(_) => return Err(()) + } + } + url.set_host_internal(host, opt_port); + Ok(()) +} + +/// Getter for https://url.spec.whatwg.org/#dom-url-hostname +#[inline] +pub fn hostname(url: &Url) -> &str { + url.host_str().unwrap_or("") +} + +/// Setter for https://url.spec.whatwg.org/#dom-url-hostname +pub fn set_hostname(url: &mut Url, new_hostname: &str) -> Result<(), ()> { + if url.cannot_be_a_base() { + return Err(()) + } + 
let result = Parser::parse_host(Input::new(new_hostname), SchemeType::from(url.scheme()));
+    if let Ok((host, _remaining)) = result {
+        url.set_host_internal(host, None);
+        Ok(())
+    } else {
+        Err(())
+    }
+}
+
+/// Getter for https://url.spec.whatwg.org/#dom-url-port
+#[inline]
+pub fn port(url: &Url) -> &str {
+    &url[Position::BeforePort..Position::AfterPort]
+}
+
+/// Setter for https://url.spec.whatwg.org/#dom-url-port
+pub fn set_port(url: &mut Url, new_port: &str) -> Result<(), ()> {
+    let result;
+    {
+        // has_host implies !cannot_be_a_base
+        let scheme = url.scheme();
+        if !url.has_host() || scheme == "file" {
+            return Err(())
+        }
+        result = Parser::parse_port(Input::new(new_port), || default_port(scheme), Context::Setter)
+    }
+    if let Ok((new_port, _remaining)) = result {
+        url.set_port_internal(new_port);
+        Ok(())
+    } else {
+        Err(())
+    }
+}
+
+/// Getter for https://url.spec.whatwg.org/#dom-url-pathname
+#[inline]
+pub fn pathname(url: &Url) -> &str {
+    url.path()
+}
+
+/// Setter for https://url.spec.whatwg.org/#dom-url-pathname
+pub fn set_pathname(url: &mut Url, new_pathname: &str) {
+    if !url.cannot_be_a_base() {
+        url.set_path(new_pathname)
+    }
+}
+
+/// Getter for https://url.spec.whatwg.org/#dom-url-search
+pub fn search(url: &Url) -> &str {
+    trim(&url[Position::AfterPath..Position::AfterQuery])
+}
+
+/// Setter for https://url.spec.whatwg.org/#dom-url-search
+pub fn set_search(url: &mut Url, new_search: &str) {
+    url.set_query(match new_search {
+        "" => None,
+        _ if new_search.starts_with('?') => Some(&new_search[1..]),
+        _ => Some(new_search),
+    })
+}
+
+/// Getter for https://url.spec.whatwg.org/#dom-url-hash
+pub fn hash(url: &Url) -> &str {
+    trim(&url[Position::AfterQuery..])
+}
+
+/// Setter for https://url.spec.whatwg.org/#dom-url-hash
+pub fn set_hash(url: &mut Url, new_hash: &str) {
+    if url.scheme() != "javascript" {
+        url.set_fragment(match new_hash {
+            "" => None,
+            _ if new_hash.starts_with('#') => Some(&new_hash[1..]),
+            _ => Some(new_hash),
+        })
+    }
+}
+
+fn trim(s: &str) -> &str {
+    if s.len() == 1 {
+        ""
+    } else {
+        s
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/src/slicing.rs b/collector/compile-benchmarks/cargo/url-1.5.1/src/slicing.rs
new file mode 100644
index 000000000..926f3c796
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/src/slicing.rs
@@ -0,0 +1,182 @@
+// Copyright 2016 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::ops::{Range, RangeFrom, RangeTo, RangeFull, Index};
+use Url;
+
+impl Index<RangeFull> for Url {
+    type Output = str;
+    fn index(&self, _: RangeFull) -> &str {
+        &self.serialization
+    }
+}
+
+impl Index<RangeFrom<Position>> for Url {
+    type Output = str;
+    fn index(&self, range: RangeFrom<Position>) -> &str {
+        &self.serialization[self.index(range.start)..]
+    }
+}
+
+impl Index<RangeTo<Position>> for Url {
+    type Output = str;
+    fn index(&self, range: RangeTo<Position>) -> &str {
+        &self.serialization[..self.index(range.end)]
+    }
+}
+
+impl Index<Range<Position>> for Url {
+    type Output = str;
+    fn index(&self, range: Range<Position>) -> &str {
+        &self.serialization[self.index(range.start)..self.index(range.end)]
+    }
+}
+
+/// Indicates a position within a URL based on its components.
+///
+/// A range of positions can be used for slicing `Url`:
+///
+/// ```rust
+/// # use url::{Url, Position};
+/// # fn something(some_url: Url) {
+/// let serialization: &str = &some_url[..];
+/// let serialization_without_fragment: &str = &some_url[..Position::AfterQuery];
+/// let authority: &str = &some_url[Position::BeforeUsername..Position::AfterPort];
+/// let data_url_payload: &str = &some_url[Position::BeforePath..Position::AfterQuery];
+/// let scheme_relative: &str = &some_url[Position::BeforeUsername..];
+/// # }
+/// ```
+///
+/// In a pseudo-grammar (where `[`…`]?` makes a sub-sequence optional),
+/// URL components and delimiters that separate them are:
+///
+/// ```notrust
+/// url =
+///     scheme ":"
+///     [ "//" [ username [ ":" password ]? "@" ]? host [ ":" port ]? ]?
+///     path [ "?" query ]? [ "#" fragment ]?
+/// ```
+///
+/// When a given component is not present,
+/// its "before" and "after" position are the same
+/// (so that `&some_url[BeforeFoo..AfterFoo]` is the empty string)
+/// and component ordering is preserved
+/// (so that a missing query "is between" a path and a fragment).
+///
+/// The end of a component and the start of the next are either the same or separated
+/// by a delimiter.
+/// (Note that the initial `/` of a path is considered part of the path here, not a delimiter.)
+/// For example, `&url[..BeforeFragment]` would include a `#` delimiter (if present in `url`),
+/// so `&url[..AfterQuery]` might be desired instead.
+///
+/// `BeforeScheme` and `AfterFragment` are always the start and end of the entire URL,
+/// so `&url[BeforeScheme..X]` is the same as `&url[..X]`
+/// and `&url[X..AfterFragment]` is the same as `&url[X..]`.
+#[derive(Copy, Clone, Debug)]
+pub enum Position {
+    BeforeScheme,
+    AfterScheme,
+    BeforeUsername,
+    AfterUsername,
+    BeforePassword,
+    AfterPassword,
+    BeforeHost,
+    AfterHost,
+    BeforePort,
+    AfterPort,
+    BeforePath,
+    AfterPath,
+    BeforeQuery,
+    AfterQuery,
+    BeforeFragment,
+    AfterFragment
+}
+
+impl Url {
+    #[inline]
+    fn index(&self, position: Position) -> usize {
+        match position {
+            Position::BeforeScheme => 0,
+
+            Position::AfterScheme => self.scheme_end as usize,
+
+            Position::BeforeUsername => if self.has_authority() {
+                self.scheme_end as usize + "://".len()
+            } else {
+                debug_assert!(self.byte_at(self.scheme_end) == b':');
+                debug_assert!(self.scheme_end + ":".len() as u32 == self.username_end);
+                self.scheme_end as usize + ":".len()
+            },
+
+            Position::AfterUsername => self.username_end as usize,
+
+            Position::BeforePassword => if self.has_authority() &&
+                    self.byte_at(self.username_end) == b':' {
+                self.username_end as usize + ":".len()
+            } else {
+                debug_assert!(self.username_end == self.host_start);
+                self.username_end as usize
+            },
+
+            Position::AfterPassword => if self.has_authority() &&
+                    self.byte_at(self.username_end) == b':' {
+                debug_assert!(self.byte_at(self.host_start - "@".len() as u32) == b'@');
+                self.host_start as usize - "@".len()
+            } else {
+                debug_assert!(self.username_end == self.host_start);
+                self.host_start as usize
+            },
+
+            Position::BeforeHost => self.host_start as usize,
+
+            Position::AfterHost => self.host_end as usize,
+
+            Position::BeforePort => if self.port.is_some() {
+                debug_assert!(self.byte_at(self.host_end) == b':');
+                self.host_end as usize + ":".len()
+            } else {
+                self.host_end as usize
+            },
+
+            Position::AfterPort => self.path_start as usize,
+
+            Position::BeforePath => self.path_start as usize,
+
+            Position::AfterPath => match (self.query_start, self.fragment_start) {
+                (Some(q), _) => q as usize,
+                (None, Some(f)) => f as usize,
+                (None, None) => self.serialization.len(),
+            },
+
+            Position::BeforeQuery => match (self.query_start, self.fragment_start) {
+                (Some(q), _) => {
+                    debug_assert!(self.byte_at(q) == b'?');
+                    q as usize + "?".len()
+                }
+                (None, Some(f)) => f as usize,
+                (None, None) => self.serialization.len(),
+            },
+
+            Position::AfterQuery => match self.fragment_start {
+                None => self.serialization.len(),
+                Some(f) => f as usize,
+            },
+
+            Position::BeforeFragment => match self.fragment_start {
+                Some(f) => {
+                    debug_assert!(self.byte_at(f) == b'#');
+                    f as usize + "#".len()
+                }
+                None => self.serialization.len(),
+            },
+
+            Position::AfterFragment => self.serialization.len(),
+        }
+    }
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/tests/data.rs b/collector/compile-benchmarks/cargo/url-1.5.1/tests/data.rs
new file mode 100644
index 000000000..a116df8cb
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/tests/data.rs
@@ -0,0 +1,203 @@
+// Copyright 2013-2014 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Data-driven tests
+
+extern crate rustc_serialize;
+extern crate test;
+extern crate url;
+
+use rustc_serialize::json::{self, Json};
+use url::{Url, quirks};
+
+fn check_invariants(url: &Url) {
+    url.check_invariants().unwrap();
+    #[cfg(feature="serde")] {
+        extern crate serde_json;
+        let bytes = serde_json::to_vec(url).unwrap();
+        let new_url: Url = serde_json::from_slice(&bytes).unwrap();
+        assert_eq!(url, &new_url);
+    }
+}
+
+
+fn run_parsing(input: &str, base: &str, expected: Result<ExpectedAttributes, ()>) {
+    let base = match Url::parse(&base) {
+        Ok(base) => base,
+        Err(message) => panic!("Error parsing base {:?}: {}", base, message)
+    };
+    let (url, expected) = match (base.join(&input), expected) {
+        (Ok(url), Ok(expected)) => (url, expected),
+        (Err(_), Err(())) => return,
+        (Err(message), Ok(_)) => panic!("Error parsing URL {:?}: {}", input, message),
+        (Ok(_), Err(())) => panic!("Expected a parse error for URL {:?}", input),
+    };
+
+    check_invariants(&url);
+
+    macro_rules! assert_eq {
+        ($expected: expr, $got: expr) => {
+            {
+                let expected = $expected;
+                let got = $got;
+                assert!(expected == got, "{:?} != {} {:?} for URL {:?}",
+                        got, stringify!($expected), expected, url);
+            }
+        }
+    }
+
+    macro_rules!
assert_attributes {
+        ($($attr: ident)+) => {
+            {
+                $(
+                    assert_eq!(expected.$attr, quirks::$attr(&url));
+                )+;
+            }
+        }
+    }
+
+    assert_attributes!(href protocol username password host hostname port pathname search hash);
+
+    if let Some(expected_origin) = expected.origin {
+        assert_eq!(expected_origin, quirks::origin(&url));
+    }
+}
+
+struct ExpectedAttributes {
+    href: String,
+    origin: Option<String>,
+    protocol: String,
+    username: String,
+    password: String,
+    host: String,
+    hostname: String,
+    port: String,
+    pathname: String,
+    search: String,
+    hash: String,
+}
+
+trait JsonExt {
+    fn take(&mut self, key: &str) -> Option<Json>;
+    fn object(self) -> json::Object;
+    fn string(self) -> String;
+    fn take_string(&mut self, key: &str) -> String;
+}
+
+impl JsonExt for Json {
+    fn take(&mut self, key: &str) -> Option<Json> {
+        self.as_object_mut().unwrap().remove(key)
+    }
+
+    fn object(self) -> json::Object {
+        if let Json::Object(o) = self { o } else { panic!("Not a Json::Object") }
+    }
+
+    fn string(self) -> String {
+        if let Json::String(s) = self { s } else { panic!("Not a Json::String") }
+    }
+
+    fn take_string(&mut self, key: &str) -> String {
+        self.take(key).unwrap().string()
+    }
+}
+
+fn collect_parsing<F: FnMut(String, test::TestFn)>(add_test: &mut F) {
+    // Copied from https://github.com/w3c/web-platform-tests/blob/master/url/
+    let mut json = Json::from_str(include_str!("urltestdata.json"))
+        .expect("JSON parse error in urltestdata.json");
+    for entry in json.as_array_mut().unwrap() {
+        if entry.is_string() {
+            continue  // ignore comments
+        }
+        let base = entry.take_string("base");
+        let input = entry.take_string("input");
+        let expected = if entry.find("failure").is_some() {
+            Err(())
+        } else {
+            Ok(ExpectedAttributes {
+                href: entry.take_string("href"),
+                origin: entry.take("origin").map(Json::string),
+                protocol: entry.take_string("protocol"),
+                username: entry.take_string("username"),
+                password: entry.take_string("password"),
+                host: entry.take_string("host"),
+                hostname: entry.take_string("hostname"),
+                port: entry.take_string("port"),
+                pathname: entry.take_string("pathname"),
+                search: entry.take_string("search"),
+                hash: entry.take_string("hash"),
+            })
+        };
+        add_test(format!("{:?} @ base {:?}", input, base),
+                 test::TestFn::dyn_test_fn(move || run_parsing(&input, &base, expected)));
+    }
+}
+
+fn collect_setters<F>(add_test: &mut F) where F: FnMut(String, test::TestFn) {
+    let mut json = Json::from_str(include_str!("setters_tests.json"))
+        .expect("JSON parse error in setters_tests.json");
+
+    macro_rules! setter {
+        ($attr: expr, $setter: ident) => {{
+            let mut tests = json.take($attr).unwrap();
+            for mut test in tests.as_array_mut().unwrap().drain(..) {
+                let comment = test.take("comment").map(Json::string).unwrap_or(String::new());
+                let href = test.take_string("href");
+                let new_value = test.take_string("new_value");
+                let name = format!("{:?}.{} = {:?} {}", href, $attr, new_value, comment);
+                let mut expected = test.take("expected").unwrap();
+                add_test(name, test::TestFn::dyn_test_fn(move || {
+                    let mut url = Url::parse(&href).unwrap();
+                    check_invariants(&url);
+                    let _ = quirks::$setter(&mut url, &new_value);
+                    assert_attributes!(url, expected,
+                                       href protocol username password host hostname port pathname search hash);
+                    check_invariants(&url);
+                }))
+            }
+        }}
+    }
+    macro_rules!
assert_attributes {
+        ($url: expr, $expected: expr, $($attr: ident)+) => {
+            $(
+                if let Some(value) = $expected.take(stringify!($attr)) {
+                    assert_eq!(quirks::$attr(&$url), value.string())
+                }
+            )+
+        }
+    }
+    setter!("protocol", set_protocol);
+    setter!("username", set_username);
+    setter!("password", set_password);
+    setter!("hostname", set_hostname);
+    setter!("host", set_host);
+    setter!("port", set_port);
+    setter!("pathname", set_pathname);
+    setter!("search", set_search);
+    setter!("hash", set_hash);
+}
+
+fn main() {
+    let mut tests = Vec::new();
+    {
+        let mut add_one = |name: String, run: test::TestFn| {
+            tests.push(test::TestDescAndFn {
+                desc: test::TestDesc {
+                    name: test::DynTestName(name),
+                    ignore: false,
+                    should_panic: test::ShouldPanic::No,
+                },
+                testfn: run,
+            })
+        };
+        collect_parsing(&mut add_one);
+        collect_setters(&mut add_one);
+    }
+    test::test_main(&std::env::args().collect::<Vec<_>>(), tests)
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/tests/setters_tests.json b/collector/compile-benchmarks/cargo/url-1.5.1/tests/setters_tests.json
new file mode 100644
index 000000000..99f1b5e18
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/tests/setters_tests.json
@@ -0,0 +1,1148 @@
+{
+    "comment": [
+        "## Tests for setters of https://url.spec.whatwg.org/#urlutils-members",
+        "",
+        "This file contains a JSON object.",
+        "Other than 'comment', each key is an attribute of the `URL` interface",
+        "defined in WHATWG’s URL Standard.",
+        "The values are arrays of test case objects for that attribute.",
+        "",
+        "To run a test case for the attribute `attr`:",
+        "",
+        "* Create a new `URL` object with the value for the 'href' key",
+        "  the constructor single parameter. (Without a base URL.)",
+        "  This must not throw.",
+        "* Set the attribute `attr` to (invoke its setter with)",
+        "  with the value of for 'new_value' key.",
+        "* The value for the 'expected' key is another object.",
+        "  For each `key` / `value` pair of that object,",
+        "  get the attribute `key` (invoke its getter).",
+        "  The returned string must be equal to `value`.",
+        "",
+        "Note: the 'href' setter is already covered by urltestdata.json."
+    ],
+    "protocol": [
+        {
+            "comment": "The empty string is not a valid scheme.
Setter leaves the URL unchanged.", + "href": "a://example.net", + "new_value": "", + "expected": { + "href": "a://example.net/", + "protocol": "a:" + } + }, + { + "href": "a://example.net", + "new_value": "b", + "expected": { + "href": "b://example.net/", + "protocol": "b:" + } + }, + { + "comment": "Upper-case ASCII is lower-cased", + "href": "a://example.net", + "new_value": "B", + "expected": { + "href": "b://example.net/", + "protocol": "b:" + } + }, + { + "comment": "Non-ASCII is rejected", + "href": "a://example.net", + "new_value": "é", + "expected": { + "href": "a://example.net/", + "protocol": "a:" + } + }, + { + "comment": "No leading digit", + "href": "a://example.net", + "new_value": "0b", + "expected": { + "href": "a://example.net/", + "protocol": "a:" + } + }, + { + "comment": "No leading punctuation", + "href": "a://example.net", + "new_value": "+b", + "expected": { + "href": "a://example.net/", + "protocol": "a:" + } + }, + { + "href": "a://example.net", + "new_value": "bC0+-.", + "expected": { + "href": "bc0+-.://example.net/", + "protocol": "bc0+-.:" + } + }, + { + "comment": "Only some punctuation is acceptable", + "href": "a://example.net", + "new_value": "b,c", + "expected": { + "href": "a://example.net/", + "protocol": "a:" + } + }, + { + "comment": "Non-ASCII is rejected", + "href": "a://example.net", + "new_value": "bé", + "expected": { + "href": "a://example.net/", + "protocol": "a:" + } + }, + { + "comment": "Spec deviation: from special scheme to not is not problematic. https://github.com/whatwg/url/issues/104", + "href": "http://example.net", + "new_value": "b", + "expected": { + "href": "b://example.net/", + "protocol": "b:" + } + }, + { + "comment": "Cannot-be-a-base URL doesn’t have a host, but URL in a special scheme must.", + "href": "mailto:me@example.net", + "new_value": "http", + "expected": { + "href": "mailto:me@example.net", + "protocol": "mailto:" + } + }, + { + "comment": "Spec deviation: from non-special scheme with a host to special is not problematic. https://github.com/whatwg/url/issues/104", + "href": "ssh://me@example.net", + "new_value": "http", + "expected": { + "href": "http://me@example.net/", + "protocol": "http:" + } + }, + { + "comment": "Stuff after the first ':' is ignored", + "href": "http://example.net", + "new_value": "https:foo : bar", + "expected": { + "href": "https://example.net/", + "protocol": "https:" + } + }, + { + "comment": "Stuff after the first ':' is ignored", + "href": "data:text/html,
<p>Test",
+            "new_value": "view-source+data:foo : bar",
+            "expected": {
+                "href": "view-source+data:text/html,<p>Test",
+                "protocol": "view-source+data:"
+            }
+        }
+    ],
+    "username": [
+        {
+            "comment": "No host means no username",
+            "href": "file:///home/you/index.html",
+            "new_value": "me",
+            "expected": {
+                "href": "file:///home/you/index.html",
+                "username": ""
+            }
+        },
+        {
+            "comment": "No host means no username",
+            "href": "unix:/run/foo.socket",
+            "new_value": "me",
+            "expected": {
+                "href": "unix:/run/foo.socket",
+                "username": ""
+            }
+        },
+        {
+            "comment": "Cannot-be-a-base means no username",
+            "href": "mailto:you@example.net",
+            "new_value": "me",
+            "expected": {
+                "href": "mailto:you@example.net",
+                "username": ""
+            }
+        },
+        {
+            "href": "http://example.net",
+            "new_value": "me",
+            "expected": {
+                "href": "http://me@example.net/",
+                "username": "me"
+            }
+        },
+        {
+            "href": "http://:secret@example.net",
+            "new_value": "me",
+            "expected": {
+                "href": "http://me:secret@example.net/",
+                "username": "me"
+            }
+        },
+        {
+            "href": "http://me@example.net",
+            "new_value": "",
+            "expected": {
+                "href": "http://example.net/",
+                "username": ""
+            }
+        },
+        {
+            "href": "http://me:secret@example.net",
+            "new_value": "",
+            "expected": {
+                "href": "http://:secret@example.net/",
+                "username": ""
+            }
+        },
+        {
+            "comment": "UTF-8 percent encoding with the userinfo encode set.",
+            "href": "http://example.net",
+            "new_value": "\u0000\u0001\t\n\r\u001f !\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~\u007f\u0080\u0081Éé",
+            "expected": {
+                "href": "http://%00%01%09%0A%0D%1F%20!%22%23$%&'()*+,-.%2F09%3A%3B%3C%3D%3E%3F%40AZ%5B%5C%5D%5E_%60az%7B%7C%7D~%7F%C2%80%C2%81%C3%89%C3%A9@example.net/",
+                "username": "%00%01%09%0A%0D%1F%20!%22%23$%&'()*+,-.%2F09%3A%3B%3C%3D%3E%3F%40AZ%5B%5C%5D%5E_%60az%7B%7C%7D~%7F%C2%80%C2%81%C3%89%C3%A9"
+            }
+        },
+        {
+            "comment": "Bytes already percent-encoded are left as-is.",
+            "href": "http://example.net",
+            "new_value": "%c3%89té",
+            "expected": {
+                "href": "http://%c3%89t%C3%A9@example.net/",
+                "username": "%c3%89t%C3%A9"
+            }
+        }
+    ],
+    "password": [
+        {
+            "comment": "No host means no password",
+            "href": "file:///home/me/index.html",
+            "new_value": "secret",
+            "expected": {
+                "href": "file:///home/me/index.html",
+                "password": ""
+            }
+        },
+        {
+            "comment": "No host means no password",
+            "href": "unix:/run/foo.socket",
+            "new_value": "secret",
+            "expected": {
+                "href": "unix:/run/foo.socket",
+                "password": ""
+            }
+        },
+        {
+            "comment": "Cannot-be-a-base means no password",
+            "href": "mailto:me@example.net",
+            "new_value": "secret",
+            "expected": {
+                "href": "mailto:me@example.net",
+                "password": ""
+            }
+        },
+        {
+            "href": "http://example.net",
+            "new_value": "secret",
+            "expected": {
+                "href": "http://:secret@example.net/",
+                "password": "secret"
+            }
+        },
+        {
+            "href": "http://me@example.net",
+            "new_value": "secret",
+            "expected": {
+                "href": "http://me:secret@example.net/",
+                "password": "secret"
+            }
+        },
+        {
+            "href": "http://:secret@example.net",
+            "new_value": "",
+            "expected": {
+                "href": "http://example.net/",
+                "password": ""
+            }
+        },
+        {
+            "href": "http://me:secret@example.net",
+            "new_value": "",
+            "expected": {
+                "href": "http://me@example.net/",
+                "password": ""
+            }
+        },
+        {
+            "comment": "UTF-8 percent encoding with the userinfo encode set.",
+            "href": "http://example.net",
+            "new_value": "\u0000\u0001\t\n\r\u001f !\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~\u007f\u0080\u0081Éé",
+            "expected": {
+                "href": "http://:%00%01%09%0A%0D%1F%20!%22%23$%&'()*+,-.%2F09%3A%3B%3C%3D%3E%3F%40AZ%5B%5C%5D%5E_%60az%7B%7C%7D~%7F%C2%80%C2%81%C3%89%C3%A9@example.net/",
+                "password":
"%00%01%09%0A%0D%1F%20!%22%23$%&'()*+,-.%2F09%3A%3B%3C%3D%3E%3F%40AZ%5B%5C%5D%5E_%60az%7B%7C%7D~%7F%C2%80%C2%81%C3%89%C3%A9" + } + }, + { + "comment": "Bytes already percent-encoded are left as-is.", + "href": "http://example.net", + "new_value": "%c3%89té", + "expected": { + "href": "http://:%c3%89t%C3%A9@example.net/", + "password": "%c3%89t%C3%A9" + } + } + ], + "host": [ + { + "comment": "Cannot-be-a-base means no host", + "href": "mailto:me@example.net", + "new_value": "example.com", + "expected": { + "href": "mailto:me@example.net", + "host": "" + } + }, + { + "comment": "Cannot-be-a-base means no password", + "href": "data:text/plain,Stuff", + "new_value": "example.net", + "expected": { + "href": "data:text/plain,Stuff", + "host": "" + } + }, + { + "href": "http://example.net", + "new_value": "example.com:8080", + "expected": { + "href": "http://example.com:8080/", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "Port number is unchanged if not specified in the new value", + "href": "http://example.net:8080", + "new_value": "example.com", + "expected": { + "href": "http://example.com:8080/", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "Port number is removed if empty in the new value: https://github.com/whatwg/url/pull/113", + "href": "http://example.net:8080", + "new_value": "example.com:", + "expected": { + "href": "http://example.com/", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "The empty host is not valid for special schemes", + "href": "http://example.net", + "new_value": "", + "expected": { + "href": "http://example.net/", + "host": "example.net" + } + }, + { + "comment": "The empty host is OK for non-special schemes", + "href": "view-source+http://example.net/foo", + "new_value": "", + "expected": { + "href": "view-source+http:///foo", + "host": "" + } + }, + { + "comment": "Path-only URLs can gain a host", + "href": "a:/foo", + "new_value": "example.net", + "expected": { + "href": "a://example.net/foo", + "host": "example.net" + } + }, + { + "comment": "Path-only URLs can gain a host", + "href": "a:/foo", + "new_value": "example.net", + "expected": { + "href": "a://example.net/foo", + "host": "example.net" + } + }, + { + "comment": "IPv4 address syntax is normalized", + "href": "http://example.net", + "new_value": "0x7F000001:8080", + "expected": { + "href": "http://127.0.0.1:8080/", + "host": "127.0.0.1:8080", + "hostname": "127.0.0.1", + "port": "8080" + } + }, + { + "comment": "IPv6 address syntax is normalized", + "href": "http://example.net", + "new_value": "[::0:01]:2", + "expected": { + "href": "http://[::1]:2/", + "host": "[::1]:2", + "hostname": "[::1]", + "port": "2" + } + }, + { + "comment": "Default port number is removed", + "href": "http://example.net", + "new_value": "example.com:80", + "expected": { + "href": "http://example.com/", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Default port number is removed", + "href": "https://example.net", + "new_value": "example.com:443", + "expected": { + "href": "https://example.com/", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Default port number is only removed for the relevant scheme", + "href": "https://example.net", + "new_value": "example.com:80", + "expected": { + "href": "https://example.com:80/", + "host": "example.com:80", + "hostname": 
"example.com", + "port": "80" + } + }, + { + "comment": "Stuff after a / delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com/stuff", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Stuff after a / delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com:8080/stuff", + "expected": { + "href": "http://example.com:8080/path", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "Stuff after a ? delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com?stuff", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Stuff after a ? delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com:8080?stuff", + "expected": { + "href": "http://example.com:8080/path", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "Stuff after a # delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com#stuff", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Stuff after a # delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com:8080#stuff", + "expected": { + "href": "http://example.com:8080/path", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "Stuff after a \\ delimiter is ignored for special schemes", + "href": "http://example.net/path", + "new_value": "example.com\\stuff", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Stuff after a \\ delimiter is ignored for special schemes", + "href": "http://example.net/path", + "new_value": "example.com:8080\\stuff", + "expected": { + "href": "http://example.com:8080/path", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "\\ is not a delimiter for non-special schemes, and it’s invalid in a domain", + "href": "view-source+http://example.net/path", + "new_value": "example.com\\stuff", + "expected": { + "href": "view-source+http://example.net/path", + "host": "example.net", + "hostname": "example.net", + "port": "" + } + }, + { + "comment": "Anything other than ASCII digit stops the port parser in a setter but is not an error", + "href": "view-source+http://example.net/path", + "new_value": "example.com:8080stuff2", + "expected": { + "href": "view-source+http://example.com:8080/path", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "Anything other than ASCII digit stops the port parser in a setter but is not an error", + "href": "http://example.net/path", + "new_value": "example.com:8080stuff2", + "expected": { + "href": "http://example.com:8080/path", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "Anything other than ASCII digit stops the port parser in a setter but is not an error", + "href": "http://example.net/path", + "new_value": "example.com:8080+2", + "expected": { + "href": "http://example.com:8080/path", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, 
+ { + "comment": "Port numbers are 16 bit integers", + "href": "http://example.net/path", + "new_value": "example.com:65535", + "expected": { + "href": "http://example.com:65535/path", + "host": "example.com:65535", + "hostname": "example.com", + "port": "65535" + } + }, + { + "comment": "Port numbers are 16 bit integers, overflowing is an error. Hostname is still set, though.", + "href": "http://example.net/path", + "new_value": "example.com:65536", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + } + ], + "hostname": [ + { + "comment": "Cannot-be-a-base means no host", + "href": "mailto:me@example.net", + "new_value": "example.com", + "expected": { + "href": "mailto:me@example.net", + "host": "" + } + }, + { + "comment": "Cannot-be-a-base means no password", + "href": "data:text/plain,Stuff", + "new_value": "example.net", + "expected": { + "href": "data:text/plain,Stuff", + "host": "" + } + }, + { + "href": "http://example.net:8080", + "new_value": "example.com", + "expected": { + "href": "http://example.com:8080/", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "The empty host is not valid for special schemes", + "href": "http://example.net", + "new_value": "", + "expected": { + "href": "http://example.net/", + "host": "example.net" + } + }, + { + "comment": "The empty host is OK for non-special schemes", + "href": "view-source+http://example.net/foo", + "new_value": "", + "expected": { + "href": "view-source+http:///foo", + "host": "" + } + }, + { + "comment": "Path-only URLs can gain a host", + "href": "a:/foo", + "new_value": "example.net", + "expected": { + "href": "a://example.net/foo", + "host": "example.net" + } + }, + { + "comment": "Path-only URLs can gain a host", + "href": "a:/foo", + "new_value": "example.net", + "expected": { + "href": "a://example.net/foo", + "host": "example.net" + } + }, + { + "comment": "IPv4 address syntax is normalized", + "href": "http://example.net:8080", + "new_value": "0x7F000001", + "expected": { + "href": "http://127.0.0.1:8080/", + "host": "127.0.0.1:8080", + "hostname": "127.0.0.1", + "port": "8080" + } + }, + { + "comment": "IPv6 address syntax is normalized", + "href": "http://example.net", + "new_value": "[::0:01]", + "expected": { + "href": "http://[::1]/", + "host": "[::1]", + "hostname": "[::1]", + "port": "" + } + }, + { + "comment": "Stuff after a : delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com:8080", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Stuff after a : delimiter is ignored", + "href": "http://example.net:8080/path", + "new_value": "example.com:", + "expected": { + "href": "http://example.com:8080/path", + "host": "example.com:8080", + "hostname": "example.com", + "port": "8080" + } + }, + { + "comment": "Stuff after a / delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com/stuff", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Stuff after a ? 
delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com?stuff", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Stuff after a # delimiter is ignored", + "href": "http://example.net/path", + "new_value": "example.com#stuff", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "Stuff after a \\ delimiter is ignored for special schemes", + "href": "http://example.net/path", + "new_value": "example.com\\stuff", + "expected": { + "href": "http://example.com/path", + "host": "example.com", + "hostname": "example.com", + "port": "" + } + }, + { + "comment": "\\ is not a delimiter for non-special schemes, and it’s invalid in a domain", + "href": "view-source+http://example.net/path", + "new_value": "example.com\\stuff", + "expected": { + "href": "view-source+http://example.net/path", + "host": "example.net", + "hostname": "example.net", + "port": "" + } + } + ], + "port": [ + { + "href": "http://example.net", + "new_value": "8080", + "expected": { + "href": "http://example.net:8080/", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + }, + { + "comment": "Port number is removed if empty in the new value: https://github.com/whatwg/url/pull/113", + "href": "http://example.net:8080", + "new_value": "", + "expected": { + "href": "http://example.net/", + "host": "example.net", + "hostname": "example.net", + "port": "" + } + }, + { + "comment": "Default port number is removed", + "href": "http://example.net:8080", + "new_value": "80", + "expected": { + "href": "http://example.net/", + "host": "example.net", + "hostname": "example.net", + "port": "" + } + }, + { + "comment": "Default port number is removed", + "href": "https://example.net:4433", + "new_value": "443", + "expected": { + "href": "https://example.net/", + "host": "example.net", + "hostname": "example.net", + "port": "" + } + }, + { + "comment": "Default port number is only removed for the relevant scheme", + "href": "https://example.net", + "new_value": "80", + "expected": { + "href": "https://example.net:80/", + "host": "example.net:80", + "hostname": "example.net", + "port": "80" + } + }, + { + "comment": "Stuff after a / delimiter is ignored", + "href": "http://example.net/path", + "new_value": "8080/stuff", + "expected": { + "href": "http://example.net:8080/path", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + }, + { + "comment": "Stuff after a ? 
delimiter is ignored", + "href": "http://example.net/path", + "new_value": "8080?stuff", + "expected": { + "href": "http://example.net:8080/path", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + }, + { + "comment": "Stuff after a # delimiter is ignored", + "href": "http://example.net/path", + "new_value": "8080#stuff", + "expected": { + "href": "http://example.net:8080/path", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + }, + { + "comment": "Stuff after a \\ delimiter is ignored for special schemes", + "href": "http://example.net/path", + "new_value": "8080\\stuff", + "expected": { + "href": "http://example.net:8080/path", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + }, + { + "comment": "Anything other than ASCII digit stops the port parser in a setter but is not an error", + "href": "view-source+http://example.net/path", + "new_value": "8080stuff2", + "expected": { + "href": "view-source+http://example.net:8080/path", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + }, + { + "comment": "Anything other than ASCII digit stops the port parser in a setter but is not an error", + "href": "http://example.net/path", + "new_value": "8080stuff2", + "expected": { + "href": "http://example.net:8080/path", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + }, + { + "comment": "Anything other than ASCII digit stops the port parser in a setter but is not an error", + "href": "http://example.net/path", + "new_value": "8080+2", + "expected": { + "href": "http://example.net:8080/path", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + }, + { + "comment": "Port numbers are 16 bit integers", + "href": "http://example.net/path", + "new_value": "65535", + "expected": { + "href": "http://example.net:65535/path", + "host": "example.net:65535", + "hostname": "example.net", + "port": "65535" + } + }, + { + "comment": "Port numbers are 16 bit integers, overflowing is an error", + "href": "http://example.net:8080/path", + "new_value": "65536", + "expected": { + "href": "http://example.net:8080/path", + "host": "example.net:8080", + "hostname": "example.net", + "port": "8080" + } + } + ], + "pathname": [ + { + "comment": "Cannot-be-a-base don’t have a path", + "href": "mailto:me@example.net", + "new_value": "/foo", + "expected": { + "href": "mailto:me@example.net", + "pathname": "me@example.net" + } + }, + { + "href": "unix:/run/foo.socket?timeout=10", + "new_value": "/var/log/../run/bar.socket", + "expected": { + "href": "unix:/var/run/bar.socket?timeout=10", + "pathname": "/var/run/bar.socket" + } + }, + { + "href": "https://example.net#nav", + "new_value": "home", + "expected": { + "href": "https://example.net/home#nav", + "pathname": "/home" + } + }, + { + "href": "https://example.net#nav", + "new_value": "../home", + "expected": { + "href": "https://example.net/home#nav", + "pathname": "/home" + } + }, + { + "comment": "\\ is a segment delimiter for 'special' URLs", + "href": "http://example.net/home?lang=fr#nav", + "new_value": "\\a\\%2E\\b\\%2e.\\c", + "expected": { + "href": "http://example.net/a/c?lang=fr#nav", + "pathname": "/a/c" + } + }, + { + "comment": "\\ is *not* a segment delimiter for non-'special' URLs", + "href": "view-source+http://example.net/home?lang=fr#nav", + "new_value": "\\a\\%2E\\b\\%2e.\\c", + "expected": { + "href": "view-source+http://example.net/\\a\\.\\b\\..\\c?lang=fr#nav", + 
"pathname": "/\\a\\.\\b\\..\\c" + } + }, + { + "comment": "UTF-8 percent encoding with the default encode set. Tabs and newlines are removed. Leading or training C0 controls and space are removed.", + "href": "a:/", + "new_value": "\u0000\u0001\t\n\r\u001f !\u0000\u0001\t\n\r\u001f !\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~\u007f\u0080\u0081Éé", + "expected": { + "href": "a:/!%00%01%1F%20!%22%23$%&'()*+,-./09:;%3C=%3E%3F@AZ[\\]^_%60az%7B|%7D~%7F%C2%80%C2%81%C3%89%C3%A9", + "pathname": "/!%00%01%1F%20!%22%23$%&'()*+,-./09:;%3C=%3E%3F@AZ[\\]^_%60az%7B|%7D~%7F%C2%80%C2%81%C3%89%C3%A9" + } + }, + { + "comment": "Bytes already percent-encoded are left as-is, except %2E.", + "href": "http://example.net", + "new_value": "%2e%2E%c3%89té", + "expected": { + "href": "http://example.net/..%c3%89t%C3%A9", + "pathname": "/..%c3%89t%C3%A9" + } + } + ], + "search": [ + { + "href": "https://example.net#nav", + "new_value": "lang=fr", + "expected": { + "href": "https://example.net/?lang=fr#nav", + "search": "?lang=fr" + } + }, + { + "href": "https://example.net?lang=en-US#nav", + "new_value": "lang=fr", + "expected": { + "href": "https://example.net/?lang=fr#nav", + "search": "?lang=fr" + } + }, + { + "href": "https://example.net?lang=en-US#nav", + "new_value": "?lang=fr", + "expected": { + "href": "https://example.net/?lang=fr#nav", + "search": "?lang=fr" + } + }, + { + "href": "https://example.net?lang=en-US#nav", + "new_value": "??lang=fr", + "expected": { + "href": "https://example.net/??lang=fr#nav", + "search": "??lang=fr" + } + }, + { + "href": "https://example.net?lang=en-US#nav", + "new_value": "?", + "expected": { + "href": "https://example.net/?#nav", + "search": "" + } + }, + { + "href": "https://example.net?lang=en-US#nav", + "new_value": "", + "expected": { + "href": "https://example.net/#nav", + "search": "" + } + }, + { + "href": "https://example.net?lang=en-US", + "new_value": "", + "expected": { + "href": "https://example.net/", + "search": "" + } + }, + { + "href": "https://example.net", + "new_value": "", + "expected": { + "href": "https://example.net/", + "search": "" + } + }, + { + "comment": "UTF-8 percent encoding with the query encode set. Tabs and newlines are removed. 
Leading or trailing C0 controls and space are removed.",
+            "href": "a:/",
+            "new_value": "\u0000\u0001\t\n\r\u001f !\u0000\u0001\t\n\r\u001f !\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~\u007f\u0080\u0081Éé",
+            "expected": {
+                "href": "a:/?!%00%01%1F%20!%22%23$%&'()*+,-./09:;%3C=%3E?@AZ[\\]^_`az{|}~%7F%C2%80%C2%81%C3%89%C3%A9",
+                "search": "?!%00%01%1F%20!%22%23$%&'()*+,-./09:;%3C=%3E?@AZ[\\]^_`az{|}~%7F%C2%80%C2%81%C3%89%C3%A9"
+            }
+        },
+        {
+            "comment": "Bytes already percent-encoded are left as-is",
+            "href": "http://example.net",
+            "new_value": "%c3%89té",
+            "expected": {
+                "href": "http://example.net/?%c3%89t%C3%A9",
+                "search": "?%c3%89t%C3%A9"
+            }
+        }
+    ],
+    "hash": [
+        {
+            "href": "https://example.net",
+            "new_value": "main",
+            "expected": {
+                "href": "https://example.net/#main",
+                "hash": "#main"
+            }
+        },
+        {
+            "href": "https://example.net#nav",
+            "new_value": "main",
+            "expected": {
+                "href": "https://example.net/#main",
+                "hash": "#main"
+            }
+        },
+        {
+            "href": "https://example.net?lang=en-US",
+            "new_value": "##nav",
+            "expected": {
+                "href": "https://example.net/?lang=en-US##nav",
+                "hash": "##nav"
+            }
+        },
+        {
+            "href": "https://example.net?lang=en-US#nav",
+            "new_value": "#main",
+            "expected": {
+                "href": "https://example.net/?lang=en-US#main",
+                "hash": "#main"
+            }
+        },
+        {
+            "href": "https://example.net?lang=en-US#nav",
+            "new_value": "#",
+            "expected": {
+                "href": "https://example.net/?lang=en-US#",
+                "hash": ""
+            }
+        },
+        {
+            "href": "https://example.net?lang=en-US#nav",
+            "new_value": "",
+            "expected": {
+                "href": "https://example.net/?lang=en-US",
+                "hash": ""
+            }
+        },
+        {
+            "comment": "Simple percent-encoding; nuls, tabs, and newlines are removed",
+            "href": "a:/",
+            "new_value": "\u0000\u0001\t\n\r\u001f !\u0000\u0001\t\n\r\u001f !\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~\u007f\u0080\u0081Éé",
+            "expected": {
+                "href": "a:/#!%01%1F !\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~%7F%C2%80%C2%81%C3%89%C3%A9",
+                "hash": "#!%01%1F !\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~%7F%C2%80%C2%81%C3%89%C3%A9"
+            }
+        },
+        {
+            "comment": "Bytes already percent-encoded are left as-is",
+            "href": "http://example.net",
+            "new_value": "%c3%89té",
+            "expected": {
+                "href": "http://example.net/#%c3%89t%C3%A9",
+                "hash": "#%c3%89t%C3%A9"
+            }
+        }
+    ]
+}
diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/tests/unit.rs b/collector/compile-benchmarks/cargo/url-1.5.1/tests/unit.rs
new file mode 100644
index 000000000..3f65edd8d
--- /dev/null
+++ b/collector/compile-benchmarks/cargo/url-1.5.1/tests/unit.rs
@@ -0,0 +1,480 @@
+// Copyright 2013-2014 The rust-url developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Unit tests
+
+#[macro_use]
+extern crate url;
+
+use std::borrow::Cow;
+use std::net::{Ipv4Addr, Ipv6Addr};
+use std::path::{Path, PathBuf};
+use url::{Host, HostAndPort, Url, form_urlencoded};
+
+#[test]
+fn size() {
+    use std::mem::size_of;
+    assert_eq!(size_of::<Url>(), size_of::<Option<Url>>());
+}
+
+macro_rules!
assert_from_file_path {
+    ($path: expr) => { assert_from_file_path!($path, $path) };
+    ($path: expr, $url_path: expr) => {{
+        let url = Url::from_file_path(Path::new($path)).unwrap();
+        assert_eq!(url.host(), None);
+        assert_eq!(url.path(), $url_path);
+        assert_eq!(url.to_file_path(), Ok(PathBuf::from($path)));
+    }};
+}
+
+
+
+#[test]
+fn new_file_paths() {
+    if cfg!(unix) {
+        assert_eq!(Url::from_file_path(Path::new("relative")), Err(()));
+        assert_eq!(Url::from_file_path(Path::new("../relative")), Err(()));
+    }
+    if cfg!(windows) {
+        assert_eq!(Url::from_file_path(Path::new("relative")), Err(()));
+        assert_eq!(Url::from_file_path(Path::new(r"..\relative")), Err(()));
+        assert_eq!(Url::from_file_path(Path::new(r"\drive-relative")), Err(()));
+        assert_eq!(Url::from_file_path(Path::new(r"\\ucn\")), Err(()));
+    }
+
+    if cfg!(unix) {
+        assert_from_file_path!("/foo/bar");
+        assert_from_file_path!("/foo/ba\0r", "/foo/ba%00r");
+        assert_from_file_path!("/foo/ba%00r", "/foo/ba%2500r");
+    }
+}
+
+#[test]
+#[cfg(unix)]
+fn new_path_bad_utf8() {
+    use std::ffi::OsStr;
+    use std::os::unix::prelude::*;
+
+    let url = Url::from_file_path(Path::new(OsStr::from_bytes(b"/foo/ba\x80r"))).unwrap();
+    let os_str = OsStr::from_bytes(b"/foo/ba\x80r");
+    assert_eq!(url.to_file_path(), Ok(PathBuf::from(os_str)));
+}
+
+#[test]
+fn new_path_windows_fun() {
+    if cfg!(windows) {
+        assert_from_file_path!(r"C:\foo\bar", "/C:/foo/bar");
+        assert_from_file_path!("C:\\foo\\ba\0r", "/C:/foo/ba%00r");
+
+        // Invalid UTF-8
+        assert!(Url::parse("file:///C:/foo/ba%80r").unwrap().to_file_path().is_err());
+
+        // test windows canonicalized path
+        let path = PathBuf::from(r"\\?\C:\foo\bar");
+        assert!(Url::from_file_path(path).is_ok());
+
+        // Percent-encoded drive letter
+        let url = Url::parse("file:///C%3A/foo/bar").unwrap();
+        assert_eq!(url.to_file_path(), Ok(PathBuf::from(r"C:\foo\bar")));
+    }
+}
+
+
+#[test]
+fn new_directory_paths() {
+    if cfg!(unix) {
+        assert_eq!(Url::from_directory_path(Path::new("relative")), Err(()));
+        assert_eq!(Url::from_directory_path(Path::new("../relative")), Err(()));
+
+        let url = Url::from_directory_path(Path::new("/foo/bar")).unwrap();
+        assert_eq!(url.host(), None);
+        assert_eq!(url.path(), "/foo/bar/");
+    }
+    if cfg!(windows) {
+        assert_eq!(Url::from_directory_path(Path::new("relative")), Err(()));
+        assert_eq!(Url::from_directory_path(Path::new(r"..\relative")), Err(()));
+        assert_eq!(Url::from_directory_path(Path::new(r"\drive-relative")), Err(()));
+        assert_eq!(Url::from_directory_path(Path::new(r"\\ucn\")), Err(()));
+
+        let url = Url::from_directory_path(Path::new(r"C:\foo\bar")).unwrap();
+        assert_eq!(url.host(), None);
+        assert_eq!(url.path(), "/C:/foo/bar/");
+    }
+}
+
+#[test]
+fn from_str() {
+    assert!("http://testing.com/this".parse::<Url>().is_ok());
+}
+
+#[test]
+fn parse_with_params() {
+    let url = Url::parse_with_params("http://testing.com/this?dont=clobberme",
+                                     &[("lang", "rust")]).unwrap();
+
+    assert_eq!(url.as_str(), "http://testing.com/this?dont=clobberme&lang=rust");
+}
+
+#[test]
+fn issue_124() {
+    let url: Url = "file:a".parse().unwrap();
+    assert_eq!(url.path(), "/a");
+    let url: Url = "file:...".parse().unwrap();
+    assert_eq!(url.path(), "/...");
+    let url: Url = "file:..".parse().unwrap();
+    assert_eq!(url.path(), "/");
+}
+
+#[test]
+fn test_equality() {
+    use std::hash::{Hash, Hasher};
+    use std::collections::hash_map::DefaultHasher;
+
+    fn check_eq(a: &Url, b: &Url) {
+        assert_eq!(a, b);
+
+        let mut h1 = DefaultHasher::new();
+        a.hash(&mut h1);
+        let mut h2 =
DefaultHasher::new(); + b.hash(&mut h2); + assert_eq!(h1.finish(), h2.finish()); + } + + fn url(s: &str) -> Url { + let rv = s.parse().unwrap(); + check_eq(&rv, &rv); + rv + } + + // Doesn't care if default port is given. + let a: Url = url("https://example.com/"); + let b: Url = url("https://example.com:443/"); + check_eq(&a, &b); + + // Different ports + let a: Url = url("http://example.com/"); + let b: Url = url("http://example.com:8080/"); + assert!(a != b, "{:?} != {:?}", a, b); + + // Different scheme + let a: Url = url("http://example.com/"); + let b: Url = url("https://example.com/"); + assert_ne!(a, b); + + // Different host + let a: Url = url("http://foo.com/"); + let b: Url = url("http://bar.com/"); + assert_ne!(a, b); + + // Missing path, automatically substituted. Semantically the same. + let a: Url = url("http://foo.com"); + let b: Url = url("http://foo.com/"); + check_eq(&a, &b); +} + +#[test] +fn host() { + fn assert_host(input: &str, host: Host<&str>) { + assert_eq!(Url::parse(input).unwrap().host(), Some(host)); + } + assert_host("http://www.mozilla.org", Host::Domain("www.mozilla.org")); + assert_host("http://1.35.33.49", Host::Ipv4(Ipv4Addr::new(1, 35, 33, 49))); + assert_host("http://[2001:0db8:85a3:08d3:1319:8a2e:0370:7344]", Host::Ipv6(Ipv6Addr::new( + 0x2001, 0x0db8, 0x85a3, 0x08d3, 0x1319, 0x8a2e, 0x0370, 0x7344))); + assert_host("http://1.35.+33.49", Host::Domain("1.35.+33.49")); + assert_host("http://[::]", Host::Ipv6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0))); + assert_host("http://[::1]", Host::Ipv6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1))); + assert_host("http://0x1.0X23.0x21.061", Host::Ipv4(Ipv4Addr::new(1, 35, 33, 49))); + assert_host("http://0x1232131", Host::Ipv4(Ipv4Addr::new(1, 35, 33, 49))); + assert_host("http://111", Host::Ipv4(Ipv4Addr::new(0, 0, 0, 111))); + assert_host("http://2..2.3", Host::Domain("2..2.3")); + assert!(Url::parse("http://42.0x1232131").is_err()); + assert!(Url::parse("http://192.168.0.257").is_err()); +} + +#[test] +fn host_serialization() { + // libstd’s `Display for Ipv6Addr` serializes 0:0:0:0:0:0:_:_ and 0:0:0:0:0:ffff:_:_ + // using IPv4-like syntax, as suggested in https://tools.ietf.org/html/rfc5952#section-4 + // but https://url.spec.whatwg.org/#concept-ipv6-serializer specifies not to. 
+
+    // Not [::0.0.0.2] / [::ffff:0.0.0.2]
+    assert_eq!(Url::parse("http://[0::2]").unwrap().host_str(), Some("[::2]"));
+    assert_eq!(Url::parse("http://[0::ffff:0:2]").unwrap().host_str(), Some("[::ffff:0:2]"));
+}
+
+#[test]
+fn test_idna() {
+    assert!("http://goșu.ro".parse::<Url>().is_ok());
+    assert_eq!(Url::parse("http://☃.net/").unwrap().host(), Some(Host::Domain("xn--n3h.net")));
+    assert!("https://r2---sn-huoa-cvhl.googlevideo.com/crossdomain.xml".parse::<Url>().is_ok());
+}
+
+#[test]
+fn test_serialization() {
+    let data = [
+        ("http://example.com/", "http://example.com/"),
+        ("http://addslash.com", "http://addslash.com/"),
+        ("http://@emptyuser.com/", "http://emptyuser.com/"),
+        ("http://:@emptypass.com/", "http://:@emptypass.com/"),
+        ("http://user@user.com/", "http://user@user.com/"),
+        ("http://user:pass@userpass.com/", "http://user:pass@userpass.com/"),
+        ("http://slashquery.com/path/?q=something", "http://slashquery.com/path/?q=something"),
+        ("http://noslashquery.com/path?q=something", "http://noslashquery.com/path?q=something")
+    ];
+    for &(input, result) in &data {
+        let url = Url::parse(input).unwrap();
+        assert_eq!(url.as_str(), result);
+    }
+}
+
+#[test]
+fn test_form_urlencoded() {
+    let pairs: &[(Cow<str>, Cow<str>)] = &[
+        ("foo".into(), "é&".into()),
+        ("bar".into(), "".into()),
+        ("foo".into(), "#".into())
+    ];
+    let encoded = form_urlencoded::Serializer::new(String::new()).extend_pairs(pairs).finish();
+    assert_eq!(encoded, "foo=%C3%A9%26&bar=&foo=%23");
+    assert_eq!(form_urlencoded::parse(encoded.as_bytes()).collect::<Vec<_>>(), pairs.to_vec());
+}
+
+#[test]
+fn test_form_serialize() {
+    let encoded = form_urlencoded::Serializer::new(String::new())
+        .append_pair("foo", "é&")
+        .append_pair("bar", "")
+        .append_pair("foo", "#")
+        .finish();
+    assert_eq!(encoded, "foo=%C3%A9%26&bar=&foo=%23");
+}
+
+#[test]
+fn host_and_port_display() {
+    assert_eq!(
+        format!(
+            "{}",
+            HostAndPort{ host: Host::Domain("www.mozilla.org"), port: 80}
+        ),
+        "www.mozilla.org:80"
+    );
+    assert_eq!(
+        format!(
+            "{}",
+            HostAndPort::<&str>{ host: Host::Ipv4(Ipv4Addr::new(1, 35, 33, 49)), port: 65535 }
+        ),
+        "1.35.33.49:65535"
+    );
+    assert_eq!(
+        format!(
+            "{}",
+            HostAndPort::<&str>{
+                host: Host::Ipv6(Ipv6Addr::new(
+                    0x2001, 0x0db8, 0x85a3, 0x08d3, 0x1319, 0x8a2e, 0x0370, 0x7344
+                )),
+                port: 1337
+            })
+        ,
+        "[2001:db8:85a3:8d3:1319:8a2e:370:7344]:1337"
+    )
+}
+
+#[test]
+/// https://github.com/servo/rust-url/issues/25
+fn issue_25() {
+    let filename = if cfg!(windows) { r"C:\run\pg.sock" } else { "/run/pg.sock" };
+    let mut url = Url::from_file_path(filename).unwrap();
+    url.check_invariants().unwrap();
+    url.set_scheme("postgres").unwrap();
+    url.check_invariants().unwrap();
+    url.set_host(Some("")).unwrap();
+    url.check_invariants().unwrap();
+    url.set_username("me").unwrap();
+    url.check_invariants().unwrap();
+    let expected = format!("postgres://me@/{}run/pg.sock", if cfg!(windows) { "C:/" } else { "" });
+    assert_eq!(url.as_str(), expected);
+}
+
+#[test]
+/// https://github.com/servo/rust-url/issues/61
+fn issue_61() {
+    let mut url = Url::parse("http://mozilla.org").unwrap();
+    url.set_scheme("https").unwrap();
+    assert_eq!(url.port(), None);
+    assert_eq!(url.port_or_known_default(), Some(443));
+    url.check_invariants().unwrap();
+}
+
+#[test]
+#[cfg(not(windows))]
+/// https://github.com/servo/rust-url/issues/197
+fn issue_197() {
+    let mut url = Url::from_file_path("/").expect("Failed to parse path");
+    url.check_invariants().unwrap();
+    assert_eq!(url,
Url::parse("file:///").expect("Failed to parse path + protocol")); + url.path_segments_mut().expect("path_segments_mut").pop_if_empty(); +} + +#[test] +fn issue_241() { + Url::parse("mailto:").unwrap().cannot_be_a_base(); +} + +#[test] +/// https://github.com/servo/rust-url/issues/222 +fn append_trailing_slash() { + let mut url: Url = "http://localhost:6767/foo/bar?a=b".parse().unwrap(); + url.check_invariants().unwrap(); + url.path_segments_mut().unwrap().push(""); + url.check_invariants().unwrap(); + assert_eq!(url.to_string(), "http://localhost:6767/foo/bar/?a=b"); +} + +#[test] +/// https://github.com/servo/rust-url/issues/227 +fn extend_query_pairs_then_mutate() { + let mut url: Url = "http://localhost:6767/foo/bar".parse().unwrap(); + url.query_pairs_mut().extend_pairs(vec![ ("auth", "my-token") ].into_iter()); + url.check_invariants().unwrap(); + assert_eq!(url.to_string(), "http://localhost:6767/foo/bar?auth=my-token"); + url.path_segments_mut().unwrap().push("some_other_path"); + url.check_invariants().unwrap(); + assert_eq!(url.to_string(), "http://localhost:6767/foo/bar/some_other_path?auth=my-token"); +} + +#[test] +/// https://github.com/servo/rust-url/issues/222 +fn append_empty_segment_then_mutate() { + let mut url: Url = "http://localhost:6767/foo/bar?a=b".parse().unwrap(); + url.check_invariants().unwrap(); + url.path_segments_mut().unwrap().push("").pop(); + url.check_invariants().unwrap(); + assert_eq!(url.to_string(), "http://localhost:6767/foo/bar?a=b"); +} + +#[test] +/// https://github.com/servo/rust-url/issues/243 +fn test_set_host() { + let mut url = Url::parse("https://example.net/hello").unwrap(); + url.set_host(Some("foo.com")).unwrap(); + assert_eq!(url.as_str(), "https://foo.com/hello"); + assert!(url.set_host(None).is_err()); + assert_eq!(url.as_str(), "https://foo.com/hello"); + assert!(url.set_host(Some("")).is_err()); + assert_eq!(url.as_str(), "https://foo.com/hello"); + + let mut url = Url::parse("foobar://example.net/hello").unwrap(); + url.set_host(None).unwrap(); + assert_eq!(url.as_str(), "foobar:/hello"); +} + +#[test] +// https://github.com/servo/rust-url/issues/166 +fn test_leading_dots() { + assert_eq!(Host::parse(".org").unwrap(), Host::Domain(".org".to_owned())); + assert_eq!(Url::parse("file://./foo").unwrap().domain(), Some(".")); +} + +// This is testing that the macro produces buildable code when invoked +// inside both a module and a function +#[test] +fn define_encode_set_scopes() { + use url::percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET}; + + define_encode_set! { + /// This encode set is used in the URL parser for query strings. + pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'} + } + + assert_eq!(utf8_percent_encode("foo bar", QUERY_ENCODE_SET).collect::(), "foo%20bar"); + + mod m { + use url::percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET}; + + define_encode_set! { + /// This encode set is used in the URL parser for query strings. 
+ pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'} + } + + pub fn test() { + assert_eq!(utf8_percent_encode("foo bar", QUERY_ENCODE_SET).collect::<String>(), "foo%20bar"); + } + } + + m::test(); +} + +#[test] +/// https://github.com/servo/rust-url/issues/302 +fn test_origin_hash() { + use std::hash::{Hash,Hasher}; + use std::collections::hash_map::DefaultHasher; + + fn hash<T: Hash>(value: &T) -> u64 { + let mut hasher = DefaultHasher::new(); + value.hash(&mut hasher); + hasher.finish() + } + + let origin = &Url::parse("http://example.net/").unwrap().origin(); + + let origins_to_compare = [ + Url::parse("http://example.net:80/").unwrap().origin(), + Url::parse("http://example.net:81/").unwrap().origin(), + Url::parse("http://example.net").unwrap().origin(), + Url::parse("http://example.net/hello").unwrap().origin(), + Url::parse("https://example.net").unwrap().origin(), + Url::parse("ftp://example.net").unwrap().origin(), + Url::parse("file://example.net").unwrap().origin(), + Url::parse("http://user@example.net/").unwrap().origin(), + Url::parse("http://user:pass@example.net/").unwrap().origin(), + ]; + + for origin_to_compare in &origins_to_compare { + if origin == origin_to_compare { + assert_eq!(hash(origin), hash(origin_to_compare)); + } else { + assert_ne!(hash(origin), hash(origin_to_compare)); + } + } + + let opaque_origin = Url::parse("file://example.net").unwrap().origin(); + let same_opaque_origin = Url::parse("file://example.net").unwrap().origin(); + let other_opaque_origin = Url::parse("file://other").unwrap().origin(); + + assert_ne!(hash(&opaque_origin), hash(&same_opaque_origin)); + assert_ne!(hash(&opaque_origin), hash(&other_opaque_origin)); +} + +#[test] +fn test_windows_unc_path() { + if !cfg!(windows) { + return + } + + let url = Url::from_file_path(Path::new(r"\\host\share\path\file.txt")).unwrap(); + assert_eq!(url.as_str(), "file://host/share/path/file.txt"); + + let url = Url::from_file_path(Path::new(r"\\höst\share\path\file.txt")).unwrap(); + assert_eq!(url.as_str(), "file://xn--hst-sna/share/path/file.txt"); + + let url = Url::from_file_path(Path::new(r"\\192.168.0.1\share\path\file.txt")).unwrap(); + assert_eq!(url.host(), Some(Host::Ipv4(Ipv4Addr::new(192, 168, 0, 1)))); + + let path = url.to_file_path().unwrap(); + assert_eq!(path.to_str(), Some(r"\\192.168.0.1\share\path\file.txt")); + + // Another way to write these: + let url = Url::from_file_path(Path::new(r"\\?\UNC\host\share\path\file.txt")).unwrap(); + assert_eq!(url.as_str(), "file://host/share/path/file.txt"); + + // Paths starting with "\\.\" (Local Device Paths) are intentionally not supported. 
+ let url = Url::from_file_path(Path::new(r"\\.\some\path\file.txt")); + assert!(url.is_err()); +} diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/tests/urltestdata.json b/collector/compile-benchmarks/cargo/url-1.5.1/tests/urltestdata.json new file mode 100644 index 000000000..36c32088c --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/tests/urltestdata.json @@ -0,0 +1,4445 @@ +[ + "# Based on http://trac.webkit.org/browser/trunk/LayoutTests/fast/url/script-tests/segments.js", + { + "input": "http://example\t.\norg", + "base": "http://example.org/foo/bar", + "href": "http://example.org/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://user:pass@foo:21/bar;par?b#c", + "base": "http://example.org/foo/bar", + "href": "http://user:pass@foo:21/bar;par?b#c", + "origin": "http://foo:21", + "protocol": "http:", + "username": "user", + "password": "pass", + "host": "foo:21", + "hostname": "foo", + "port": "21", + "pathname": "/bar;par", + "search": "?b", + "hash": "#c" + }, + { + "input": "http:foo.com", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/foo.com", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/foo.com", + "search": "", + "hash": "" + }, + { + "input": "\t :foo.com \n", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:foo.com", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:foo.com", + "search": "", + "hash": "" + }, + { + "input": " foo.com ", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/foo.com", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/foo.com", + "search": "", + "hash": "" + }, + { + "input": "a:\t foo.com", + "base": "http://example.org/foo/bar", + "href": "a: foo.com", + "origin": "null", + "protocol": "a:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": " foo.com", + "search": "", + "hash": "" + }, + { + "input": "http://f:21/ b ? 
d # e ", + "base": "http://example.org/foo/bar", + "href": "http://f:21/%20b%20?%20d%20# e", + "origin": "http://f:21", + "protocol": "http:", + "username": "", + "password": "", + "host": "f:21", + "hostname": "f", + "port": "21", + "pathname": "/%20b%20", + "search": "?%20d%20", + "hash": "# e" + }, + { + "input": "http://f:/c", + "base": "http://example.org/foo/bar", + "href": "http://f/c", + "origin": "http://f", + "protocol": "http:", + "username": "", + "password": "", + "host": "f", + "hostname": "f", + "port": "", + "pathname": "/c", + "search": "", + "hash": "" + }, + { + "input": "http://f:0/c", + "base": "http://example.org/foo/bar", + "href": "http://f:0/c", + "origin": "http://f:0", + "protocol": "http:", + "username": "", + "password": "", + "host": "f:0", + "hostname": "f", + "port": "0", + "pathname": "/c", + "search": "", + "hash": "" + }, + { + "input": "http://f:00000000000000/c", + "base": "http://example.org/foo/bar", + "href": "http://f:0/c", + "origin": "http://f:0", + "protocol": "http:", + "username": "", + "password": "", + "host": "f:0", + "hostname": "f", + "port": "0", + "pathname": "/c", + "search": "", + "hash": "" + }, + { + "input": "http://f:00000000000000000000080/c", + "base": "http://example.org/foo/bar", + "href": "http://f/c", + "origin": "http://f", + "protocol": "http:", + "username": "", + "password": "", + "host": "f", + "hostname": "f", + "port": "", + "pathname": "/c", + "search": "", + "hash": "" + }, + { + "input": "http://f:b/c", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "http://f: /c", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "http://f:\n/c", + "base": "http://example.org/foo/bar", + "href": "http://f/c", + "origin": "http://f", + "protocol": "http:", + "username": "", + "password": "", + "host": "f", + "hostname": "f", + "port": "", + "pathname": "/c", + "search": "", + "hash": "" + }, + { + "input": "http://f:fifty-two/c", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "http://f:999999/c", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "http://f: 21 / b ? 
d # e ", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "hash": "" + }, + { + "input": " \t", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "hash": "" + }, + { + "input": ":foo.com/", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:foo.com/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:foo.com/", + "search": "", + "hash": "" + }, + { + "input": ":foo.com\\", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:foo.com/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:foo.com/", + "search": "", + "hash": "" + }, + { + "input": ":", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:", + "search": "", + "hash": "" + }, + { + "input": ":a", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:a", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:a", + "search": "", + "hash": "" + }, + { + "input": ":/", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:/", + "search": "", + "hash": "" + }, + { + "input": ":\\", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:/", + "search": "", + "hash": "" + }, + { + "input": ":#", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:#", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:", + "search": "", + "hash": "" + }, + { + "input": "#", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar#", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "hash": "" + }, + { + "input": "#/", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar#/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": 
"example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "hash": "#/" + }, + { + "input": "#\\", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar#\\", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "hash": "#\\" + }, + { + "input": "#;?", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar#;?", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "hash": "#;?" + }, + { + "input": "?", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar?", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "hash": "" + }, + { + "input": "/", + "base": "http://example.org/foo/bar", + "href": "http://example.org/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": ":23", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:23", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:23", + "search": "", + "hash": "" + }, + { + "input": "/:23", + "base": "http://example.org/foo/bar", + "href": "http://example.org/:23", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/:23", + "search": "", + "hash": "" + }, + { + "input": "::", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/::", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/::", + "search": "", + "hash": "" + }, + { + "input": "::23", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/::23", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/::23", + "search": "", + "hash": "" + }, + { + "input": "foo://", + "base": "http://example.org/foo/bar", + "href": "foo:///", + "origin": "null", + "protocol": "foo:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://a:b@c:29/d", + "base": "http://example.org/foo/bar", + "href": "http://a:b@c:29/d", + "origin": "http://c:29", + "protocol": "http:", + "username": "a", + "password": "b", + "host": "c:29", + "hostname": "c", + "port": "29", + "pathname": "/d", + "search": "", + "hash": "" + }, + { + "input": "http::@c:29", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/:@c:29", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/:@c:29", + "search": "", + 
"hash": "" + }, + { + "input": "http://&a:foo(b]c@d:2/", + "base": "http://example.org/foo/bar", + "href": "http://&a:foo(b%5Dc@d:2/", + "origin": "http://d:2", + "protocol": "http:", + "username": "&a", + "password": "foo(b%5Dc", + "host": "d:2", + "hostname": "d", + "port": "2", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://::@c@d:2", + "base": "http://example.org/foo/bar", + "href": "http://:%3A%40c@d:2/", + "origin": "http://d:2", + "protocol": "http:", + "username": "", + "password": "%3A%40c", + "host": "d:2", + "hostname": "d", + "port": "2", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://foo.com:b@d/", + "base": "http://example.org/foo/bar", + "href": "http://foo.com:b@d/", + "origin": "http://d", + "protocol": "http:", + "username": "foo.com", + "password": "b", + "host": "d", + "hostname": "d", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://foo.com/\\@", + "base": "http://example.org/foo/bar", + "href": "http://foo.com//@", + "origin": "http://foo.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo.com", + "hostname": "foo.com", + "port": "", + "pathname": "//@", + "search": "", + "hash": "" + }, + { + "input": "http:\\\\foo.com\\", + "base": "http://example.org/foo/bar", + "href": "http://foo.com/", + "origin": "http://foo.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo.com", + "hostname": "foo.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:\\\\a\\b:c\\d@foo.com\\", + "base": "http://example.org/foo/bar", + "href": "http://a/b:c/d@foo.com/", + "origin": "http://a", + "protocol": "http:", + "username": "", + "password": "", + "host": "a", + "hostname": "a", + "port": "", + "pathname": "/b:c/d@foo.com/", + "search": "", + "hash": "" + }, + { + "input": "foo:/", + "base": "http://example.org/foo/bar", + "href": "foo:/", + "origin": "null", + "protocol": "foo:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "foo:/bar.com/", + "base": "http://example.org/foo/bar", + "href": "foo:/bar.com/", + "origin": "null", + "protocol": "foo:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/bar.com/", + "search": "", + "hash": "" + }, + { + "input": "foo://///////", + "base": "http://example.org/foo/bar", + "href": "foo://///////", + "origin": "null", + "protocol": "foo:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "///////", + "search": "", + "hash": "" + }, + { + "input": "foo://///////bar.com/", + "base": "http://example.org/foo/bar", + "href": "foo://///////bar.com/", + "origin": "null", + "protocol": "foo:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "///////bar.com/", + "search": "", + "hash": "" + }, + { + "input": "foo:////://///", + "base": "http://example.org/foo/bar", + "href": "foo:////://///", + "origin": "null", + "protocol": "foo:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "//://///", + "search": "", + "hash": "" + }, + { + "input": "c:/foo", + "base": "http://example.org/foo/bar", + "href": "c:/foo", + "origin": "null", + "protocol": "c:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/foo", + "search": "", + 
"hash": "" + }, + { + "input": "//foo/bar", + "base": "http://example.org/foo/bar", + "href": "http://foo/bar", + "origin": "http://foo", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/bar", + "search": "", + "hash": "" + }, + { + "input": "http://foo/path;a??e#f#g", + "base": "http://example.org/foo/bar", + "href": "http://foo/path;a??e#f#g", + "origin": "http://foo", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/path;a", + "search": "??e", + "hash": "#f#g" + }, + { + "input": "http://foo/abcd?efgh?ijkl", + "base": "http://example.org/foo/bar", + "href": "http://foo/abcd?efgh?ijkl", + "origin": "http://foo", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/abcd", + "search": "?efgh?ijkl", + "hash": "" + }, + { + "input": "http://foo/abcd#foo?bar", + "base": "http://example.org/foo/bar", + "href": "http://foo/abcd#foo?bar", + "origin": "http://foo", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/abcd", + "search": "", + "hash": "#foo?bar" + }, + { + "input": "[61:24:74]:98", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/[61:24:74]:98", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/[61:24:74]:98", + "search": "", + "hash": "" + }, + { + "input": "http:[61:27]/:foo", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/[61:27]/:foo", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/[61:27]/:foo", + "search": "", + "hash": "" + }, + { + "input": "http://[1::2]:3:4", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "http://2001::1", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "http://2001::1]", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "http://2001::1]:80", + "base": "http://example.org/foo/bar", + "failure": true + }, + { + "input": "http://[2001::1]", + "base": "http://example.org/foo/bar", + "href": "http://[2001::1]/", + "origin": "http://[2001::1]", + "protocol": "http:", + "username": "", + "password": "", + "host": "[2001::1]", + "hostname": "[2001::1]", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://[::127.0.0.1]", + "base": "http://example.org/foo/bar", + "href": "http://[::7f00:1]/", + "origin": "http://[::7f00:1]", + "protocol": "http:", + "username": "", + "password": "", + "host": "[::7f00:1]", + "hostname": "[::7f00:1]", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://[0:0:0:0:0:0:13.1.68.3]", + "base": "http://example.org/foo/bar", + "href": "http://[::d01:4403]/", + "origin": "http://[::d01:4403]", + "protocol": "http:", + "username": "", + "password": "", + "host": "[::d01:4403]", + "hostname": "[::d01:4403]", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://[2001::1]:80", + "base": "http://example.org/foo/bar", + "href": "http://[2001::1]/", + "origin": "http://[2001::1]", + "protocol": "http:", + "username": "", + "password": "", + 
"host": "[2001::1]", + "hostname": "[2001::1]", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:/example.com/", + "base": "http://example.org/foo/bar", + "href": "http://example.org/example.com/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "ftp:/example.com/", + "base": "http://example.org/foo/bar", + "href": "ftp://example.com/", + "origin": "ftp://example.com", + "protocol": "ftp:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "https:/example.com/", + "base": "http://example.org/foo/bar", + "href": "https://example.com/", + "origin": "https://example.com", + "protocol": "https:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "madeupscheme:/example.com/", + "base": "http://example.org/foo/bar", + "href": "madeupscheme:/example.com/", + "origin": "null", + "protocol": "madeupscheme:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "file:/example.com/", + "base": "http://example.org/foo/bar", + "href": "file:///example.com/", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "ftps:/example.com/", + "base": "http://example.org/foo/bar", + "href": "ftps:/example.com/", + "origin": "null", + "protocol": "ftps:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "gopher:/example.com/", + "base": "http://example.org/foo/bar", + "href": "gopher://example.com/", + "origin": "gopher://example.com", + "protocol": "gopher:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ws:/example.com/", + "base": "http://example.org/foo/bar", + "href": "ws://example.com/", + "origin": "ws://example.com", + "protocol": "ws:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "wss:/example.com/", + "base": "http://example.org/foo/bar", + "href": "wss://example.com/", + "origin": "wss://example.com", + "protocol": "wss:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "data:/example.com/", + "base": "http://example.org/foo/bar", + "href": "data:/example.com/", + "origin": "null", + "protocol": "data:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "javascript:/example.com/", + "base": "http://example.org/foo/bar", + "href": "javascript:/example.com/", + "origin": "null", + "protocol": "javascript:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": 
"/example.com/", + "search": "", + "hash": "" + }, + { + "input": "mailto:/example.com/", + "base": "http://example.org/foo/bar", + "href": "mailto:/example.com/", + "origin": "null", + "protocol": "mailto:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "http:example.com/", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/example.com/", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/example.com/", + "search": "", + "hash": "" + }, + { + "input": "ftp:example.com/", + "base": "http://example.org/foo/bar", + "href": "ftp://example.com/", + "origin": "ftp://example.com", + "protocol": "ftp:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "https:example.com/", + "base": "http://example.org/foo/bar", + "href": "https://example.com/", + "origin": "https://example.com", + "protocol": "https:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "madeupscheme:example.com/", + "base": "http://example.org/foo/bar", + "href": "madeupscheme:example.com/", + "origin": "null", + "protocol": "madeupscheme:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + "input": "ftps:example.com/", + "base": "http://example.org/foo/bar", + "href": "ftps:example.com/", + "origin": "null", + "protocol": "ftps:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + "input": "gopher:example.com/", + "base": "http://example.org/foo/bar", + "href": "gopher://example.com/", + "origin": "gopher://example.com", + "protocol": "gopher:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ws:example.com/", + "base": "http://example.org/foo/bar", + "href": "ws://example.com/", + "origin": "ws://example.com", + "protocol": "ws:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "wss:example.com/", + "base": "http://example.org/foo/bar", + "href": "wss://example.com/", + "origin": "wss://example.com", + "protocol": "wss:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "data:example.com/", + "base": "http://example.org/foo/bar", + "href": "data:example.com/", + "origin": "null", + "protocol": "data:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + "input": "javascript:example.com/", + "base": "http://example.org/foo/bar", + "href": "javascript:example.com/", + "origin": "null", + "protocol": "javascript:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + 
"input": "mailto:example.com/", + "base": "http://example.org/foo/bar", + "href": "mailto:example.com/", + "origin": "null", + "protocol": "mailto:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + "input": "/a/b/c", + "base": "http://example.org/foo/bar", + "href": "http://example.org/a/b/c", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/a/b/c", + "search": "", + "hash": "" + }, + { + "input": "/a/ /c", + "base": "http://example.org/foo/bar", + "href": "http://example.org/a/%20/c", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/a/%20/c", + "search": "", + "hash": "" + }, + { + "input": "/a%2fc", + "base": "http://example.org/foo/bar", + "href": "http://example.org/a%2fc", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/a%2fc", + "search": "", + "hash": "" + }, + { + "input": "/a/%2f/c", + "base": "http://example.org/foo/bar", + "href": "http://example.org/a/%2f/c", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/a/%2f/c", + "search": "", + "hash": "" + }, + { + "input": "#β", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar#%CE%B2", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "hash": "#%CE%B2" + }, + { + "input": "data:text/html,test#test", + "base": "http://example.org/foo/bar", + "href": "data:text/html,test#test", + "origin": "null", + "protocol": "data:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "text/html,test", + "search": "", + "hash": "#test" + }, + { + "input": "tel:1234567890", + "base": "http://example.org/foo/bar", + "href": "tel:1234567890", + "origin": "null", + "protocol": "tel:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "1234567890", + "search": "", + "hash": "" + }, + "# Based on http://trac.webkit.org/browser/trunk/LayoutTests/fast/url/file.html", + { + "input": "file:c:\\foo\\bar.html", + "base": "file:///tmp/mock/path", + "href": "file:///c:/foo/bar.html", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/c:/foo/bar.html", + "search": "", + "hash": "" + }, + { + "input": " File:c|////foo\\bar.html", + "base": "file:///tmp/mock/path", + "href": "file:///c:////foo/bar.html", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/c:////foo/bar.html", + "search": "", + "hash": "" + }, + { + "input": "C|/foo/bar", + "base": "file:///tmp/mock/path", + "href": "file:///C:/foo/bar", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/C:/foo/bar", + "search": "", + "hash": "" + }, + { + "input": "/C|\\foo\\bar", + "base": "file:///tmp/mock/path", + "href": 
"file:///C:/foo/bar", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/C:/foo/bar", + "search": "", + "hash": "" + }, + { + "input": "//C|/foo/bar", + "base": "file:///tmp/mock/path", + "href": "file:///C:/foo/bar", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/C:/foo/bar", + "search": "", + "hash": "" + }, + { + "input": "//server/file", + "base": "file:///tmp/mock/path", + "href": "file://server/file", + "protocol": "file:", + "username": "", + "password": "", + "host": "server", + "hostname": "server", + "port": "", + "pathname": "/file", + "search": "", + "hash": "" + }, + { + "input": "\\\\server\\file", + "base": "file:///tmp/mock/path", + "href": "file://server/file", + "protocol": "file:", + "username": "", + "password": "", + "host": "server", + "hostname": "server", + "port": "", + "pathname": "/file", + "search": "", + "hash": "" + }, + { + "input": "/\\server/file", + "base": "file:///tmp/mock/path", + "href": "file://server/file", + "protocol": "file:", + "username": "", + "password": "", + "host": "server", + "hostname": "server", + "port": "", + "pathname": "/file", + "search": "", + "hash": "" + }, + { + "input": "file:///foo/bar.txt", + "base": "file:///tmp/mock/path", + "href": "file:///foo/bar.txt", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/foo/bar.txt", + "search": "", + "hash": "" + }, + { + "input": "file:///home/me", + "base": "file:///tmp/mock/path", + "href": "file:///home/me", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/home/me", + "search": "", + "hash": "" + }, + { + "input": "//", + "base": "file:///tmp/mock/path", + "href": "file:///", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "///", + "base": "file:///tmp/mock/path", + "href": "file:///", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "///test", + "base": "file:///tmp/mock/path", + "href": "file:///test", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/test", + "search": "", + "hash": "" + }, + { + "input": "file://test", + "base": "file:///tmp/mock/path", + "href": "file://test/", + "protocol": "file:", + "username": "", + "password": "", + "host": "test", + "hostname": "test", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "file://localhost", + "base": "file:///tmp/mock/path", + "href": "file:///", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "file://localhost/", + "base": "file:///tmp/mock/path", + "href": "file:///", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "file://localhost/test", + "base": "file:///tmp/mock/path", + "href": "file:///test", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/test", + "search": "", + "hash": "" 
+ }, + { + "input": "test", + "base": "file:///tmp/mock/path", + "href": "file:///tmp/mock/test", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/tmp/mock/test", + "search": "", + "hash": "" + }, + { + "input": "file:test", + "base": "file:///tmp/mock/path", + "href": "file:///tmp/mock/test", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/tmp/mock/test", + "search": "", + "hash": "" + }, + "# Based on http://trac.webkit.org/browser/trunk/LayoutTests/fast/url/script-tests/path.js", + { + "input": "http://example.com/././foo", + "base": "about:blank", + "href": "http://example.com/foo", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/./.foo", + "base": "about:blank", + "href": "http://example.com/.foo", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/.foo", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/.", + "base": "about:blank", + "href": "http://example.com/foo/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/./", + "base": "about:blank", + "href": "http://example.com/foo/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/bar/..", + "base": "about:blank", + "href": "http://example.com/foo/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/bar/../", + "base": "about:blank", + "href": "http://example.com/foo/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/..bar", + "base": "about:blank", + "href": "http://example.com/foo/..bar", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/..bar", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/bar/../ton", + "base": "about:blank", + "href": "http://example.com/foo/ton", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/ton", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/bar/../ton/../../a", + "base": "about:blank", + "href": "http://example.com/a", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": 
"/a", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/../../..", + "base": "about:blank", + "href": "http://example.com/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/../../../ton", + "base": "about:blank", + "href": "http://example.com/ton", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/ton", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/%2e", + "base": "about:blank", + "href": "http://example.com/foo/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/%2e%2", + "base": "about:blank", + "href": "http://example.com/foo/.%2", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/.%2", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/%2e./%2e%2e/.%2e/%2e.bar", + "base": "about:blank", + "href": "http://example.com/..bar", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/..bar", + "search": "", + "hash": "" + }, + { + "input": "http://example.com////../..", + "base": "about:blank", + "href": "http://example.com//", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "//", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/bar//../..", + "base": "about:blank", + "href": "http://example.com/foo/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo/bar//..", + "base": "about:blank", + "href": "http://example.com/foo/bar/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo/bar/", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo", + "base": "about:blank", + "href": "http://example.com/foo", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/%20foo", + "base": "about:blank", + "href": "http://example.com/%20foo", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/%20foo", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo%", + "base": "about:blank", + "href": "http://example.com/foo%", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": 
"example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo%", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo%2", + "base": "about:blank", + "href": "http://example.com/foo%2", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo%2", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo%2zbar", + "base": "about:blank", + "href": "http://example.com/foo%2zbar", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo%2zbar", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo%2©zbar", + "base": "about:blank", + "href": "http://example.com/foo%2%C3%82%C2%A9zbar", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo%2%C3%82%C2%A9zbar", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo%41%7a", + "base": "about:blank", + "href": "http://example.com/foo%41%7a", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo%41%7a", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo\t\u0091%91", + "base": "about:blank", + "href": "http://example.com/foo%C2%91%91", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo%C2%91%91", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo%00%51", + "base": "about:blank", + "href": "http://example.com/foo%00%51", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foo%00%51", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/(%28:%3A%29)", + "base": "about:blank", + "href": "http://example.com/(%28:%3A%29)", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/(%28:%3A%29)", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/%3A%3a%3C%3c", + "base": "about:blank", + "href": "http://example.com/%3A%3a%3C%3c", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/%3A%3a%3C%3c", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/foo\tbar", + "base": "about:blank", + "href": "http://example.com/foobar", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/foobar", + "search": "", + "hash": "" + }, + { + "input": "http://example.com\\\\foo\\\\bar", + "base": "about:blank", + "href": "http://example.com//foo//bar", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "//foo//bar", + "search": "", + "hash": "" + }, + { + "input": 
"http://example.com/%7Ffp3%3Eju%3Dduvgw%3Dd", + "base": "about:blank", + "href": "http://example.com/%7Ffp3%3Eju%3Dduvgw%3Dd", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/%7Ffp3%3Eju%3Dduvgw%3Dd", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/@asdf%40", + "base": "about:blank", + "href": "http://example.com/@asdf%40", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/@asdf%40", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/你好你好", + "base": "about:blank", + "href": "http://example.com/%E4%BD%A0%E5%A5%BD%E4%BD%A0%E5%A5%BD", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/%E4%BD%A0%E5%A5%BD%E4%BD%A0%E5%A5%BD", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/‥/foo", + "base": "about:blank", + "href": "http://example.com/%E2%80%A5/foo", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/%E2%80%A5/foo", + "search": "", + "hash": "" + }, + { + "input": "http://example.com//foo", + "base": "about:blank", + "href": "http://example.com/%EF%BB%BF/foo", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/%EF%BB%BF/foo", + "search": "", + "hash": "" + }, + { + "input": "http://example.com/‮/foo/‭/bar", + "base": "about:blank", + "href": "http://example.com/%E2%80%AE/foo/%E2%80%AD/bar", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/%E2%80%AE/foo/%E2%80%AD/bar", + "search": "", + "hash": "" + }, + "# Based on http://trac.webkit.org/browser/trunk/LayoutTests/fast/url/script-tests/relative.js", + { + "input": "http://www.google.com/foo?bar=baz#", + "base": "about:blank", + "href": "http://www.google.com/foo?bar=baz#", + "origin": "http://www.google.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.google.com", + "hostname": "www.google.com", + "port": "", + "pathname": "/foo", + "search": "?bar=baz", + "hash": "" + }, + { + "input": "http://www.google.com/foo?bar=baz# »", + "base": "about:blank", + "href": "http://www.google.com/foo?bar=baz# %C2%BB", + "origin": "http://www.google.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.google.com", + "hostname": "www.google.com", + "port": "", + "pathname": "/foo", + "search": "?bar=baz", + "hash": "# %C2%BB" + }, + { + "input": "data:test# »", + "base": "about:blank", + "href": "data:test# %C2%BB", + "origin": "null", + "protocol": "data:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "test", + "search": "", + "hash": "# %C2%BB" + }, + { + "input": "http://[www.google.com]/", + "base": "about:blank", + "failure": true + }, + { + "input": "http://www.google.com", + "base": "about:blank", + "href": "http://www.google.com/", + "origin": "http://www.google.com", + "protocol": "http:", + "username": "", + "password": "", + "host": 
"www.google.com", + "hostname": "www.google.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://192.0x00A80001", + "base": "about:blank", + "href": "http://192.168.0.1/", + "origin": "http://192.168.0.1", + "protocol": "http:", + "username": "", + "password": "", + "host": "192.168.0.1", + "hostname": "192.168.0.1", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://www/foo%2Ehtml", + "base": "about:blank", + "href": "http://www/foo.html", + "origin": "http://www", + "protocol": "http:", + "username": "", + "password": "", + "host": "www", + "hostname": "www", + "port": "", + "pathname": "/foo.html", + "search": "", + "hash": "" + }, + { + "input": "http://www/foo/%2E/html", + "base": "about:blank", + "href": "http://www/foo/html", + "origin": "http://www", + "protocol": "http:", + "username": "", + "password": "", + "host": "www", + "hostname": "www", + "port": "", + "pathname": "/foo/html", + "search": "", + "hash": "" + }, + { + "input": "http://user:pass@/", + "base": "about:blank", + "failure": true + }, + { + "input": "http://%25DOMAIN:foobar@foodomain.com/", + "base": "about:blank", + "href": "http://%25DOMAIN:foobar@foodomain.com/", + "origin": "http://foodomain.com", + "protocol": "http:", + "username": "%25DOMAIN", + "password": "foobar", + "host": "foodomain.com", + "hostname": "foodomain.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:\\\\www.google.com\\foo", + "base": "about:blank", + "href": "http://www.google.com/foo", + "origin": "http://www.google.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.google.com", + "hostname": "www.google.com", + "port": "", + "pathname": "/foo", + "search": "", + "hash": "" + }, + { + "input": "http://foo:80/", + "base": "about:blank", + "href": "http://foo/", + "origin": "http://foo", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://foo:81/", + "base": "about:blank", + "href": "http://foo:81/", + "origin": "http://foo:81", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo:81", + "hostname": "foo", + "port": "81", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "httpa://foo:80/", + "base": "about:blank", + "href": "httpa://foo:80/", + "origin": "null", + "protocol": "httpa:", + "username": "", + "password": "", + "host": "foo:80", + "hostname": "foo", + "port": "80", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://foo:-80/", + "base": "about:blank", + "failure": true + }, + { + "input": "https://foo:443/", + "base": "about:blank", + "href": "https://foo/", + "origin": "https://foo", + "protocol": "https:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "https://foo:80/", + "base": "about:blank", + "href": "https://foo:80/", + "origin": "https://foo:80", + "protocol": "https:", + "username": "", + "password": "", + "host": "foo:80", + "hostname": "foo", + "port": "80", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ftp://foo:21/", + "base": "about:blank", + "href": "ftp://foo/", + "origin": "ftp://foo", + "protocol": "ftp:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/", + "search": "", + 
"hash": "" + }, + { + "input": "ftp://foo:80/", + "base": "about:blank", + "href": "ftp://foo:80/", + "origin": "ftp://foo:80", + "protocol": "ftp:", + "username": "", + "password": "", + "host": "foo:80", + "hostname": "foo", + "port": "80", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "gopher://foo:70/", + "base": "about:blank", + "href": "gopher://foo/", + "origin": "gopher://foo", + "protocol": "gopher:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "gopher://foo:443/", + "base": "about:blank", + "href": "gopher://foo:443/", + "origin": "gopher://foo:443", + "protocol": "gopher:", + "username": "", + "password": "", + "host": "foo:443", + "hostname": "foo", + "port": "443", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ws://foo:80/", + "base": "about:blank", + "href": "ws://foo/", + "origin": "ws://foo", + "protocol": "ws:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ws://foo:81/", + "base": "about:blank", + "href": "ws://foo:81/", + "origin": "ws://foo:81", + "protocol": "ws:", + "username": "", + "password": "", + "host": "foo:81", + "hostname": "foo", + "port": "81", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ws://foo:443/", + "base": "about:blank", + "href": "ws://foo:443/", + "origin": "ws://foo:443", + "protocol": "ws:", + "username": "", + "password": "", + "host": "foo:443", + "hostname": "foo", + "port": "443", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ws://foo:815/", + "base": "about:blank", + "href": "ws://foo:815/", + "origin": "ws://foo:815", + "protocol": "ws:", + "username": "", + "password": "", + "host": "foo:815", + "hostname": "foo", + "port": "815", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "wss://foo:80/", + "base": "about:blank", + "href": "wss://foo:80/", + "origin": "wss://foo:80", + "protocol": "wss:", + "username": "", + "password": "", + "host": "foo:80", + "hostname": "foo", + "port": "80", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "wss://foo:81/", + "base": "about:blank", + "href": "wss://foo:81/", + "origin": "wss://foo:81", + "protocol": "wss:", + "username": "", + "password": "", + "host": "foo:81", + "hostname": "foo", + "port": "81", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "wss://foo:443/", + "base": "about:blank", + "href": "wss://foo/", + "origin": "wss://foo", + "protocol": "wss:", + "username": "", + "password": "", + "host": "foo", + "hostname": "foo", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "wss://foo:815/", + "base": "about:blank", + "href": "wss://foo:815/", + "origin": "wss://foo:815", + "protocol": "wss:", + "username": "", + "password": "", + "host": "foo:815", + "hostname": "foo", + "port": "815", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:/example.com/", + "base": "about:blank", + "href": "http://example.com/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ftp:/example.com/", + "base": "about:blank", + "href": "ftp://example.com/", + "origin": "ftp://example.com", + "protocol": "ftp:", + 
"username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "https:/example.com/", + "base": "about:blank", + "href": "https://example.com/", + "origin": "https://example.com", + "protocol": "https:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "madeupscheme:/example.com/", + "base": "about:blank", + "href": "madeupscheme:/example.com/", + "origin": "null", + "protocol": "madeupscheme:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "file:/example.com/", + "base": "about:blank", + "href": "file:///example.com/", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "ftps:/example.com/", + "base": "about:blank", + "href": "ftps:/example.com/", + "origin": "null", + "protocol": "ftps:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "gopher:/example.com/", + "base": "about:blank", + "href": "gopher://example.com/", + "origin": "gopher://example.com", + "protocol": "gopher:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ws:/example.com/", + "base": "about:blank", + "href": "ws://example.com/", + "origin": "ws://example.com", + "protocol": "ws:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "wss:/example.com/", + "base": "about:blank", + "href": "wss://example.com/", + "origin": "wss://example.com", + "protocol": "wss:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "data:/example.com/", + "base": "about:blank", + "href": "data:/example.com/", + "origin": "null", + "protocol": "data:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "javascript:/example.com/", + "base": "about:blank", + "href": "javascript:/example.com/", + "origin": "null", + "protocol": "javascript:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "mailto:/example.com/", + "base": "about:blank", + "href": "mailto:/example.com/", + "origin": "null", + "protocol": "mailto:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/example.com/", + "search": "", + "hash": "" + }, + { + "input": "http:example.com/", + "base": "about:blank", + "href": "http://example.com/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ftp:example.com/", + "base": "about:blank", + "href": "ftp://example.com/", + "origin": 
"ftp://example.com", + "protocol": "ftp:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "https:example.com/", + "base": "about:blank", + "href": "https://example.com/", + "origin": "https://example.com", + "protocol": "https:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "madeupscheme:example.com/", + "base": "about:blank", + "href": "madeupscheme:example.com/", + "origin": "null", + "protocol": "madeupscheme:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + "input": "ftps:example.com/", + "base": "about:blank", + "href": "ftps:example.com/", + "origin": "null", + "protocol": "ftps:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + "input": "gopher:example.com/", + "base": "about:blank", + "href": "gopher://example.com/", + "origin": "gopher://example.com", + "protocol": "gopher:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "ws:example.com/", + "base": "about:blank", + "href": "ws://example.com/", + "origin": "ws://example.com", + "protocol": "ws:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "wss:example.com/", + "base": "about:blank", + "href": "wss://example.com/", + "origin": "wss://example.com", + "protocol": "wss:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "data:example.com/", + "base": "about:blank", + "href": "data:example.com/", + "origin": "null", + "protocol": "data:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + "input": "javascript:example.com/", + "base": "about:blank", + "href": "javascript:example.com/", + "origin": "null", + "protocol": "javascript:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + { + "input": "mailto:example.com/", + "base": "about:blank", + "href": "mailto:example.com/", + "origin": "null", + "protocol": "mailto:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "example.com/", + "search": "", + "hash": "" + }, + "# Based on http://trac.webkit.org/browser/trunk/LayoutTests/fast/url/segments-userinfo-vs-host.html", + { + "input": "http:@www.example.com", + "base": "about:blank", + "href": "http://www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:/@www.example.com", + "base": "about:blank", + "href": "http://www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + 
"hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://@www.example.com", + "base": "about:blank", + "href": "http://www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:a:b@www.example.com", + "base": "about:blank", + "href": "http://a:b@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "a", + "password": "b", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:/a:b@www.example.com", + "base": "about:blank", + "href": "http://a:b@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "a", + "password": "b", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://a:b@www.example.com", + "base": "about:blank", + "href": "http://a:b@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "a", + "password": "b", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://@pple.com", + "base": "about:blank", + "href": "http://pple.com/", + "origin": "http://pple.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "pple.com", + "hostname": "pple.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http::b@www.example.com", + "base": "about:blank", + "href": "http://:b@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "b", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:/:b@www.example.com", + "base": "about:blank", + "href": "http://:b@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "b", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://:b@www.example.com", + "base": "about:blank", + "href": "http://:b@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "b", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:/:@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http://user@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http:@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http:/@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http://@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "https:@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http:a:b@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http:/a:b@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http://a:b@/www.example.com", + "base": "about:blank", + 
"failure": true + }, + { + "input": "http::@/www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http:a:@www.example.com", + "base": "about:blank", + "href": "http://a:@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "a", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:/a:@www.example.com", + "base": "about:blank", + "href": "http://a:@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "a", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://a:@www.example.com", + "base": "about:blank", + "href": "http://a:@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "a", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://www.@pple.com", + "base": "about:blank", + "href": "http://www.@pple.com/", + "origin": "http://pple.com", + "protocol": "http:", + "username": "www.", + "password": "", + "host": "pple.com", + "hostname": "pple.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http:@:www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http:/@:www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http://@:www.example.com", + "base": "about:blank", + "failure": true + }, + { + "input": "http://:@www.example.com", + "base": "about:blank", + "href": "http://:@www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "# Others", + { + "input": "/", + "base": "http://www.example.com/test", + "href": "http://www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "/test.txt", + "base": "http://www.example.com/test", + "href": "http://www.example.com/test.txt", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/test.txt", + "search": "", + "hash": "" + }, + { + "input": ".", + "base": "http://www.example.com/test", + "href": "http://www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "..", + "base": "http://www.example.com/test", + "href": "http://www.example.com/", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "test.txt", + "base": "http://www.example.com/test", + "href": "http://www.example.com/test.txt", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", 
+ "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/test.txt", + "search": "", + "hash": "" + }, + { + "input": "./test.txt", + "base": "http://www.example.com/test", + "href": "http://www.example.com/test.txt", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/test.txt", + "search": "", + "hash": "" + }, + { + "input": "../test.txt", + "base": "http://www.example.com/test", + "href": "http://www.example.com/test.txt", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/test.txt", + "search": "", + "hash": "" + }, + { + "input": "../aaa/test.txt", + "base": "http://www.example.com/test", + "href": "http://www.example.com/aaa/test.txt", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/aaa/test.txt", + "search": "", + "hash": "" + }, + { + "input": "../../test.txt", + "base": "http://www.example.com/test", + "href": "http://www.example.com/test.txt", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/test.txt", + "search": "", + "hash": "" + }, + { + "input": "中/test.txt", + "base": "http://www.example.com/test", + "href": "http://www.example.com/%E4%B8%AD/test.txt", + "origin": "http://www.example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example.com", + "hostname": "www.example.com", + "port": "", + "pathname": "/%E4%B8%AD/test.txt", + "search": "", + "hash": "" + }, + { + "input": "http://www.example2.com", + "base": "http://www.example.com/test", + "href": "http://www.example2.com/", + "origin": "http://www.example2.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example2.com", + "hostname": "www.example2.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "//www.example2.com", + "base": "http://www.example.com/test", + "href": "http://www.example2.com/", + "origin": "http://www.example2.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.example2.com", + "hostname": "www.example2.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "file:...", + "base": "http://www.example.com/test", + "href": "file:///...", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/...", + "search": "", + "hash": "" + }, + { + "input": "file:..", + "base": "http://www.example.com/test", + "href": "file:///", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "file:a", + "base": "http://www.example.com/test", + "href": "file:///a", + "protocol": "file:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/a", + "search": "", + "hash": "" + }, + "# Based on http://trac.webkit.org/browser/trunk/LayoutTests/fast/url/host.html", + "Basic canonicalization, uppercase should be converted to lowercase", + { + "input": 
"http://ExAmPlE.CoM", + "base": "http://other.com/", + "href": "http://example.com/", + "origin": "http://example.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://example example.com", + "base": "http://other.com/", + "failure": true + }, + { + "input": "http://Goo%20 goo%7C|.com", + "base": "http://other.com/", + "failure": true + }, + { + "input": "http://[]", + "base": "http://other.com/", + "failure": true + }, + { + "input": "http://[:]", + "base": "http://other.com/", + "failure": true + }, + "U+3000 is mapped to U+0020 (space) which is disallowed", + { + "input": "http://GOO\u00a0\u3000goo.com", + "base": "http://other.com/", + "failure": true + }, + "Other types of space (no-break, zero-width, zero-width-no-break) are name-prepped away to nothing. U+200B, U+2060, and U+FEFF, are ignored", + { + "input": "http://GOO\u200b\u2060\ufeffgoo.com", + "base": "http://other.com/", + "href": "http://googoo.com/", + "origin": "http://googoo.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "googoo.com", + "hostname": "googoo.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "Ideographic full stop (full-width period for Chinese, etc.) should be treated as a dot. U+3002 is mapped to U+002E (dot)", + { + "input": "http://www.foo。bar.com", + "base": "http://other.com/", + "href": "http://www.foo.bar.com/", + "origin": "http://www.foo.bar.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "www.foo.bar.com", + "hostname": "www.foo.bar.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "Invalid unicode characters should fail... U+FDD0 is disallowed; %ef%b7%90 is U+FDD0", + { + "input": "http://\ufdd0zyx.com", + "base": "http://other.com/", + "failure": true + }, + "This is the same as previous but escaped", + { + "input": "http://%ef%b7%90zyx.com", + "base": "http://other.com/", + "failure": true + }, + "Test name prepping, fullwidth input should be converted to ASCII and NOT IDN-ized. This is 'Go' in fullwidth UTF-8/UTF-16.", + { + "input": "http://Go.com", + "base": "http://other.com/", + "href": "http://go.com/", + "origin": "http://go.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "go.com", + "hostname": "go.com", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "URL spec forbids the following. https://www.w3.org/Bugs/Public/show_bug.cgi?id=24257", + { + "input": "http://%41.com", + "base": "http://other.com/", + "failure": true + }, + { + "input": "http://%ef%bc%85%ef%bc%94%ef%bc%91.com", + "base": "http://other.com/", + "failure": true + }, + "...%00 in fullwidth should fail (also as escaped UTF-8 input)", + { + "input": "http://%00.com", + "base": "http://other.com/", + "failure": true + }, + { + "input": "http://%ef%bc%85%ef%bc%90%ef%bc%90.com", + "base": "http://other.com/", + "failure": true + }, + "Basic IDN support, UTF-8 and UTF-16 input should be converted to IDN", + { + "input": "http://你好你好", + "base": "http://other.com/", + "href": "http://xn--6qqa088eba/", + "origin": "http://你好你好", + "protocol": "http:", + "username": "", + "password": "", + "host": "xn--6qqa088eba", + "hostname": "xn--6qqa088eba", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "Invalid escaped characters should fail and the percents should be escaped. 
https://www.w3.org/Bugs/Public/show_bug.cgi?id=24191", + { + "input": "http://%zz%66%a.com", + "base": "http://other.com/", + "failure": true + }, + "If we get an invalid character that has been escaped.", + { + "input": "http://%25", + "base": "http://other.com/", + "failure": true + }, + { + "input": "http://hello%00", + "base": "http://other.com/", + "failure": true + }, + "Escaped numbers should be treated like IP addresses if they are.", + { + "input": "http://%30%78%63%30%2e%30%32%35%30.01", + "base": "http://other.com/", + "href": "http://192.168.0.1/", + "origin": "http://192.168.0.1", + "protocol": "http:", + "username": "", + "password": "", + "host": "192.168.0.1", + "hostname": "192.168.0.1", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://%30%78%63%30%2e%30%32%35%30.01%2e", + "base": "http://other.com/", + "href": "http://192.168.0.1/", + "origin": "http://192.168.0.1", + "protocol": "http:", + "username": "", + "password": "", + "host": "192.168.0.1", + "hostname": "192.168.0.1", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "http://192.168.0.257", + "base": "http://other.com/", + "failure": true + }, + "Invalid escaping should trigger the regular host error handling", + { + "input": "http://%3g%78%63%30%2e%30%32%35%30%2E.01", + "base": "http://other.com/", + "failure": true + }, + "Something that isn't exactly an IP should get treated as a host and spaces escaped", + { + "input": "http://192.168.0.1 hello", + "base": "http://other.com/", + "failure": true + }, + "Fullwidth and escaped UTF-8 fullwidth should still be treated as IP", + { + "input": "http://0Xc0.0250.01", + "base": "http://other.com/", + "href": "http://192.168.0.1/", + "origin": "http://192.168.0.1", + "protocol": "http:", + "username": "", + "password": "", + "host": "192.168.0.1", + "hostname": "192.168.0.1", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "Broken IPv6", + { + "input": "http://[google.com]", + "base": "http://other.com/", + "failure": true + }, + "Misc Unicode", + { + "input": "http://foo:💩@example.com/bar", + "base": "http://other.com/", + "href": "http://foo:%F0%9F%92%A9@example.com/bar", + "origin": "http://example.com", + "protocol": "http:", + "username": "foo", + "password": "%F0%9F%92%A9", + "host": "example.com", + "hostname": "example.com", + "port": "", + "pathname": "/bar", + "search": "", + "hash": "" + }, + "# resolving a fragment against any scheme succeeds", + { + "input": "#", + "base": "test:test", + "href": "test:test#", + "origin": "null", + "protocol": "test:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "test", + "search": "", + "hash": "" + }, + { + "input": "#x", + "base": "mailto:x@x.com", + "href": "mailto:x@x.com#x", + "origin": "null", + "protocol": "mailto:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "x@x.com", + "search": "", + "hash": "#x" + }, + { + "input": "#x", + "base": "data:,", + "href": "data:,#x", + "origin": "null", + "protocol": "data:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": ",", + "search": "", + "hash": "#x" + }, + { + "input": "#x", + "base": "about:blank", + "href": "about:blank#x", + "origin": "null", + "protocol": "about:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "blank", + "search": "", + "hash": "#x" + }, + { + "input": "#", + 
"base": "test:test?test", + "href": "test:test?test#", + "origin": "null", + "protocol": "test:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "test", + "search": "?test", + "hash": "" + }, + "# multiple @ in authority state", + { + "input": "https://@test@test@example:800/", + "base": "http://doesnotmatter/", + "href": "https://%40test%40test@example:800/", + "origin": "https://example:800", + "protocol": "https:", + "username": "%40test%40test", + "password": "", + "host": "example:800", + "hostname": "example", + "port": "800", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "https://@@@example", + "base": "http://doesnotmatter/", + "href": "https://%40%40@example/", + "origin": "https://example", + "protocol": "https:", + "username": "%40%40", + "password": "", + "host": "example", + "hostname": "example", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "non-az-09 characters", + { + "input": "http://`{}:`{}@h/`{}?`{}", + "base": "http://doesnotmatter/", + "href": "http://%60%7B%7D:%60%7B%7D@h/%60%7B%7D?`{}", + "origin": "http://h", + "protocol": "http:", + "username": "%60%7B%7D", + "password": "%60%7B%7D", + "host": "h", + "hostname": "h", + "port": "", + "pathname": "/%60%7B%7D", + "search": "?`{}", + "hash": "" + }, + "# Credentials in base", + { + "input": "/some/path", + "base": "http://user@example.org/smth", + "href": "http://user@example.org/some/path", + "origin": "http://example.org", + "protocol": "http:", + "username": "user", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/some/path", + "search": "", + "hash": "" + }, + { + "input": "", + "base": "http://user:pass@example.org:21/smth", + "href": "http://user:pass@example.org:21/smth", + "origin": "http://example.org:21", + "protocol": "http:", + "username": "user", + "password": "pass", + "host": "example.org:21", + "hostname": "example.org", + "port": "21", + "pathname": "/smth", + "search": "", + "hash": "" + }, + { + "input": "/some/path", + "base": "http://user:pass@example.org:21/smth", + "href": "http://user:pass@example.org:21/some/path", + "origin": "http://example.org:21", + "protocol": "http:", + "username": "user", + "password": "pass", + "host": "example.org:21", + "hostname": "example.org", + "port": "21", + "pathname": "/some/path", + "search": "", + "hash": "" + }, + "# a set of tests designed by zcorpan for relative URLs with unknown schemes", + { + "input": "i", + "base": "sc:sd", + "failure": true + }, + { + "input": "i", + "base": "sc:sd/sd", + "failure": true + }, + { + "input": "i", + "base": "sc:/pa/pa", + "href": "sc:/pa/i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/pa/i", + "search": "", + "hash": "" + }, + { + "input": "i", + "base": "sc://ho/pa", + "href": "sc://ho/i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "ho", + "hostname": "ho", + "port": "", + "pathname": "/i", + "search": "", + "hash": "" + }, + { + "input": "i", + "base": "sc:///pa/pa", + "href": "sc:///pa/i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/pa/i", + "search": "", + "hash": "" + }, + { + "input": "../i", + "base": "sc:sd", + "failure": true + }, + { + "input": "../i", + "base": "sc:sd/sd", + "failure": true + }, + { + "input": "../i", + "base": 
"sc:/pa/pa", + "href": "sc:/i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/i", + "search": "", + "hash": "" + }, + { + "input": "../i", + "base": "sc://ho/pa", + "href": "sc://ho/i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "ho", + "hostname": "ho", + "port": "", + "pathname": "/i", + "search": "", + "hash": "" + }, + { + "input": "../i", + "base": "sc:///pa/pa", + "href": "sc:///i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/i", + "search": "", + "hash": "" + }, + { + "input": "/i", + "base": "sc:sd", + "failure": true + }, + { + "input": "/i", + "base": "sc:sd/sd", + "failure": true + }, + { + "input": "/i", + "base": "sc:/pa/pa", + "href": "sc:/i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/i", + "search": "", + "hash": "" + }, + { + "input": "/i", + "base": "sc://ho/pa", + "href": "sc://ho/i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "ho", + "hostname": "ho", + "port": "", + "pathname": "/i", + "search": "", + "hash": "" + }, + { + "input": "/i", + "base": "sc:///pa/pa", + "href": "sc:///i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/i", + "search": "", + "hash": "" + }, + { + "input": "?i", + "base": "sc:sd", + "failure": true + }, + { + "input": "?i", + "base": "sc:sd/sd", + "failure": true + }, + { + "input": "?i", + "base": "sc:/pa/pa", + "href": "sc:/pa/pa?i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/pa/pa", + "search": "?i", + "hash": "" + }, + { + "input": "?i", + "base": "sc://ho/pa", + "href": "sc://ho/pa?i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "ho", + "hostname": "ho", + "port": "", + "pathname": "/pa", + "search": "?i", + "hash": "" + }, + { + "input": "?i", + "base": "sc:///pa/pa", + "href": "sc:///pa/pa?i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/pa/pa", + "search": "?i", + "hash": "" + }, + { + "input": "#i", + "base": "sc:sd", + "href": "sc:sd#i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "sd", + "search": "", + "hash": "#i" + }, + { + "input": "#i", + "base": "sc:sd/sd", + "href": "sc:sd/sd#i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "sd/sd", + "search": "", + "hash": "#i" + }, + { + "input": "#i", + "base": "sc:/pa/pa", + "href": "sc:/pa/pa#i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/pa/pa", + "search": "", + "hash": "#i" + }, + { + "input": "#i", + "base": "sc://ho/pa", + "href": "sc://ho/pa#i", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "ho", + "hostname": "ho", + "port": "", + "pathname": "/pa", + "search": "", + "hash": "#i" + }, + { + "input": "#i", + "base": "sc:///pa/pa", + "href": "sc:///pa/pa#i", + "origin": "null", + 
"protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/pa/pa", + "search": "", + "hash": "#i" + }, + "# make sure that relative URL logic works on known typically non-relative schemes too", + { + "input": "about:/../", + "base": "about:blank", + "href": "about:/", + "origin": "null", + "protocol": "about:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "data:/../", + "base": "about:blank", + "href": "data:/", + "origin": "null", + "protocol": "data:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "javascript:/../", + "base": "about:blank", + "href": "javascript:/", + "origin": "null", + "protocol": "javascript:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + { + "input": "mailto:/../", + "base": "about:blank", + "href": "mailto:/", + "origin": "null", + "protocol": "mailto:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "# unknown schemes and non-ASCII domains", + { + "input": "sc://ñ.test/", + "base": "about:blank", + "href": "sc://xn--ida.test/", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "xn--ida.test", + "hostname": "xn--ida.test", + "port": "", + "pathname": "/", + "search": "", + "hash": "" + }, + "# unknown schemes and backslashes", + { + "input": "sc:\\../", + "base": "about:blank", + "href": "sc:\\../", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "\\../", + "search": "", + "hash": "" + }, + "# unknown scheme with path looking like a password", + { + "input": "sc::a@example.net", + "base": "about:blank", + "href": "sc::a@example.net", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": ":a@example.net", + "search": "", + "hash": "" + }, + "# tests from jsdom/whatwg-url designed for code coverage", + { + "input": "http://127.0.0.1:10100/relative_import.html", + "base": "about:blank", + "href": "http://127.0.0.1:10100/relative_import.html", + "origin": "http://127.0.0.1:10100", + "protocol": "http:", + "username": "", + "password": "", + "host": "127.0.0.1:10100", + "hostname": "127.0.0.1", + "port": "10100", + "pathname": "/relative_import.html", + "search": "", + "hash": "" + }, + { + "input": "http://facebook.com/?foo=%7B%22abc%22", + "base": "about:blank", + "href": "http://facebook.com/?foo=%7B%22abc%22", + "origin": "http://facebook.com", + "protocol": "http:", + "username": "", + "password": "", + "host": "facebook.com", + "hostname": "facebook.com", + "port": "", + "pathname": "/", + "search": "?foo=%7B%22abc%22", + "hash": "" + }, + { + "input": "https://localhost:3000/jqueryui@1.2.3", + "base": "about:blank", + "href": "https://localhost:3000/jqueryui@1.2.3", + "origin": "https://localhost:3000", + "protocol": "https:", + "username": "", + "password": "", + "host": "localhost:3000", + "hostname": "localhost", + "port": "3000", + "pathname": "/jqueryui@1.2.3", + "search": "", + "hash": "" + }, + "# tab/LF/CR", + { + "input": "h\tt\nt\rp://h\to\ns\rt:9\t0\n0\r0/p\ta\nt\rh?q\tu\ne\rry#f\tr\na\rg", + "base": 
"about:blank", + "href": "http://host:9000/path?query#frag", + "origin": "http://host:9000", + "protocol": "http:", + "username": "", + "password": "", + "host": "host:9000", + "hostname": "host", + "port": "9000", + "pathname": "/path", + "search": "?query", + "hash": "#frag" + }, + "# Stringification of URL.searchParams", + { + "input": "?a=b&c=d", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar?a=b&c=d", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "?a=b&c=d", + "searchParams": "a=b&c=d", + "hash": "" + }, + { + "input": "??a=b&c=d", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar??a=b&c=d", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "??a=b&c=d", + "searchParams": "%3Fa=b&c=d", + "hash": "" + }, + "# Scheme only", + { + "input": "http:", + "base": "http://example.org/foo/bar", + "href": "http://example.org/foo/bar", + "origin": "http://example.org", + "protocol": "http:", + "username": "", + "password": "", + "host": "example.org", + "hostname": "example.org", + "port": "", + "pathname": "/foo/bar", + "search": "", + "searchParams": "", + "hash": "" + }, + { + "input": "http:", + "base": "https://example.org/foo/bar", + "failure": true + }, + { + "input": "sc:", + "base": "https://example.org/foo/bar", + "href": "sc:", + "origin": "null", + "protocol": "sc:", + "username": "", + "password": "", + "host": "", + "hostname": "", + "port": "", + "pathname": "", + "search": "", + "searchParams": "", + "hash": "" + }, + "# Percent encoding of fragments", + { + "input": "http://foo.bar/baz?qux#foo\bbar", + "base": "about:blank", + "href": "http://foo.bar/baz?qux#foo%08bar", + "origin": "http://foo.bar", + "protocol": "http:", + "username": "", + "password": "", + "host": "foo.bar", + "hostname": "foo.bar", + "port": "", + "pathname": "/baz", + "search": "?qux", + "searchParams": "", + "hash": "#foo%08bar" + }, + "# IPv6 compression and serialization", + { + "input": "http://[fe80:cd00::1257:0:211e:729c]/", + "base": "about:blank", + "href": "http://[fe80:cd00::1257:0:211e:729c]/", + "origin": "http://[fe80:cd00::1257:0:211e:729c]", + "protocol": "http:", + "username": "", + "password": "", + "host": "[fe80:cd00::1257:0:211e:729c]", + "hostname": "[fe80:cd00::1257:0:211e:729c]", + "port": "", + "pathname": "/", + "search": "", + "searchParams": "", + "hash": "" + }, + "# IPv6 compression and serialization: Compress sequences of two or more zeroes", + { + "input": "http://[fe80:cd00:0:0:1257:0:211e:729c]/", + "base": "about:blank", + "href": "http://[fe80:cd00::1257:0:211e:729c]/", + "origin": "http://[fe80:cd00::1257:0:211e:729c]", + "protocol": "http:", + "username": "", + "password": "", + "host": "[fe80:cd00::1257:0:211e:729c]", + "hostname": "[fe80:cd00::1257:0:211e:729c]", + "port": "", + "pathname": "/", + "search": "", + "searchParams": "", + "hash": "" + }, + "# IPv6 compression and serialization: Compress longest sequence of zeroes", + { + "input": "http://[fe80:0:0:1257:0:0:0:cd00]/", + "base": "about:blank", + "href": "http://[fe80:0:0:1257::cd00]/", + "origin": "http://[fe80:0:0:1257::cd00]", + "protocol": "http:", + "username": "", + "password": "", + "host": "[fe80:0:0:1257::cd00]", + "hostname": 
"[fe80:0:0:1257::cd00]", + "port": "", + "pathname": "/", + "search": "", + "searchParams": "", + "hash": "" + }, + "# IPv6 compression and serialization: Do not compress lone zeroes", + { + "input": "http://[fe80:cd00:0:cde:1257:0:211e:729c]/", + "base": "about:blank", + "href": "http://[fe80:cd00:0:cde:1257:0:211e:729c]/", + "origin": "http://[fe80:cd00:0:cde:1257:0:211e:729c]", + "protocol": "http:", + "username": "", + "password": "", + "host": "[fe80:cd00:0:cde:1257:0:211e:729c]", + "hostname": "[fe80:cd00:0:cde:1257:0:211e:729c]", + "port": "", + "pathname": "/", + "search": "", + "searchParams": "", + "hash": "" + } +] diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/Cargo.toml b/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/Cargo.toml new file mode 100644 index 000000000..935b5932c --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/Cargo.toml @@ -0,0 +1,23 @@ +[package] + +name = "url_serde" +version = "0.2.0" +authors = ["The rust-url developers"] + +description = "Serde support for URL types" +documentation = "https://docs.rs/url_serde/" +repository = "https://github.com/servo/rust-url" +readme = "README.md" +keywords = ["url", "serde"] +license = "MIT/Apache-2.0" + +[dependencies] +serde = "1.0" +url = "1.0.0" + +[dev-dependencies] +serde_json = "1.0" +serde_derive = "1.0" + +[lib] +doctest = false diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/README.md b/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/README.md new file mode 100644 index 000000000..dea503946 --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/README.md @@ -0,0 +1,11 @@ +Serde support for rust-url types +================================ + +This crate provides wrappers and convenience functions to make `rust-url` and `serde` +work hand in hand. + +Version `0.2` or newer of this crate offer support for `serde 1.0`. +Version `0.1` of this crate offer support for `serde 0.9`. +Versions of `serde` older than `0.9` are natively supported by `rust-url` crate directly. + +For more details, see the crate [documentation](https://docs.rs/url_serde/). \ No newline at end of file diff --git a/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/src/lib.rs b/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/src/lib.rs new file mode 100644 index 000000000..3c8d110da --- /dev/null +++ b/collector/compile-benchmarks/cargo/url-1.5.1/url_serde/src/lib.rs @@ -0,0 +1,410 @@ +// Copyright 2017 The rust-url developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +/*! + +This crate provides wrappers and convenience functions to make rust-url +and Serde work hand in hand. + +The supported types are: + +* `url::Url` + +# How do I use a data type with a `Url` member with Serde? + +Use the serde attributes `deserialize_with` and `serialize_with`. + +``` +#[derive(serde::Serialize, serde::Deserialize)] +struct MyStruct { + #[serde(serialize_with = "serialize")] + url: Url, +} +``` + +# How do I encode a `Url` value with `serde_json::to_string`? + +Use the `Ser` wrapper. + +``` +serde_json::to_string(&Ser::new(&url)) +``` + +# How do I decode a `Url` value with `serde_json::parse`? + +Use the `De` wrapper. + +``` +serde_json::from_str(r"http:://www.rust-lang.org").map(De::into_inner) +``` + +# How do I send `Url` values as part of an IPC channel? 
+ +Use the `Serde` wrapper. It implements `Deref` and `DerefMut` for convenience. + +``` +ipc::channel::<Serde<Url>>() +``` +*/ + +#![deny(missing_docs)] +#![deny(unsafe_code)] + +extern crate serde; +#[cfg(test)] #[macro_use] extern crate serde_derive; +#[cfg(test)] extern crate serde_json; +extern crate url; + +use serde::{Deserialize, Serialize, Serializer, Deserializer}; +use std::cmp::PartialEq; +use std::error::Error; +use std::fmt; +use std::io::Write; +use std::ops::{Deref, DerefMut}; +use std::str; +use url::{Url, Host}; + +/// Serialises `value` with a given serializer. +/// +/// This is useful to serialize `rust-url` types used in structure fields or +/// tuple members with `#[serde(serialize_with = "url_serde::serialize")]`. +pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error> + where S: Serializer, for<'a> Ser<'a, T>: Serialize +{ + Ser::new(value).serialize(serializer) +} + +/// A wrapper to serialize `rust-url` types. +/// +/// This is useful with functions such as `serde_json::to_string`. +/// +/// Values of this type can only be passed to the `serde::Serialize` trait. +#[derive(Debug)] +pub struct Ser<'a, T: 'a>(&'a T); + +impl<'a, T> Ser<'a, T> where Ser<'a, T>: Serialize { + /// Returns a new `Ser` wrapper. + #[inline(always)] + pub fn new(value: &'a T) -> Self { + Ser(value) + } +} + +/// Serializes this URL into a `serde` stream. +impl<'a> Serialize for Ser<'a, Url> { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer { + serializer.serialize_str(self.0.as_str()) + } +} + +/// Serializes this Option<Url> into a `serde` stream. +impl<'a> Serialize for Ser<'a, Option<Url>> { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer { + if let Some(url) = self.0.as_ref() { + serializer.serialize_some(url.as_str()) + } else { + serializer.serialize_none() + } + } +} + +impl<'a, String> Serialize for Ser<'a, Host<String>> where String: AsRef<str> { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer { + match *self.0 { + Host::Domain(ref s) => serializer.serialize_str(s.as_ref()), + Host::Ipv4(_) | Host::Ipv6(_) => { + // max("101.102.103.104".len(), + // "[1000:1002:1003:1004:1005:1006:101.102.103.104]".len()) + const MAX_LEN: usize = 47; + let mut buffer = [0; MAX_LEN]; + serializer.serialize_str(display_into_buffer(&self.0, &mut buffer)) + } + } + } +} + +/// Like .to_string(), but doesn’t allocate memory for a `String`. +/// +/// Panics if `buffer` is too small. +fn display_into_buffer<'a, T: fmt::Display>(value: &T, buffer: &'a mut [u8]) -> &'a str { + let remaining_len; + { + let mut remaining = &mut *buffer; + write!(remaining, "{}", value).unwrap(); + remaining_len = remaining.len() + } + let written_len = buffer.len() - remaining_len; + let written = &buffer[..written_len]; + + // write! only provides std::fmt::Formatter to Display implementations, + // which has methods write_str and write_char but no method to write arbitrary bytes. + // Therefore, `written` is well-formed in UTF-8. + #[allow(unsafe_code)] + unsafe { + str::from_utf8_unchecked(written) + } +} + +/// Deserialises a `T` value with a given deserializer. +/// +/// This is useful to deserialize Url types used in structure fields or +/// tuple members with `#[serde(deserialize_with = "url_serde::deserialize")]`. +pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error> + where D: Deserializer<'de>, De<T>: Deserialize<'de> +{ + De::deserialize(deserializer).map(De::into_inner) +} + +/// A wrapper to deserialize `rust-url` types. +/// +/// This is useful with functions such as `serde_json::from_str`. 
+/// +/// Values of this type can only be obtained through +/// the `serde::Deserialize` trait. +#[derive(Debug)] +pub struct De<T>(T); + +impl<'de, T> De<T> where De<T>: serde::Deserialize<'de> { + /// Consumes this wrapper, returning the deserialized value. + #[inline(always)] + pub fn into_inner(self) -> T { + self.0 + } +} + +/// Deserializes this URL from a `serde` stream. +impl<'de> Deserialize<'de> for De<Url> { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> { + let string_representation: String = Deserialize::deserialize(deserializer)?; + Url::parse(&string_representation).map(De).map_err(|err| { + serde::de::Error::custom(err.description()) + }) + } +} + +/// Deserializes this Option<Url> from a `serde` stream. +impl<'de> Deserialize<'de> for De<Option<Url>> { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> { + let option_representation: Option<String> = Deserialize::deserialize(deserializer)?; + if let Some(s) = option_representation { + return Url::parse(&s) + .map(Some) + .map(De) + .map_err(|err| {serde::de::Error::custom(err.description())}); + } + Ok(De(None)) + + } +} + +impl<'de> Deserialize<'de> for De<Host> { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> { + let string_representation: String = Deserialize::deserialize(deserializer)?; + Host::parse(&string_representation).map(De).map_err(|err| { + serde::de::Error::custom(err.description()) + }) + } +} + +/// A convenience wrapper to be used as a type parameter, for example when +/// a `Vec<T>` or a `HashMap<K, V>` needs to be passed to serde. +#[derive(Clone, Eq, Hash, PartialEq)] +pub struct Serde<T>(pub T); + +/// A convenience type alias for Serde<Url>. +pub type SerdeUrl = Serde<Url>; + +impl<'de, T> Serde<T> +where De<T>: Deserialize<'de>, for<'a> Ser<'a, T>: Serialize +{ + /// Consumes this wrapper, returning the inner value. 
+ #[inline(always)] + pub fn into_inner(self) -> T { + self.0 + } +} + +impl<'de, T> fmt::Debug for Serde<T> +where T: fmt::Debug, De<T>: Deserialize<'de>, for<'a> Ser<'a, T>: Serialize +{ + fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { + self.0.fmt(formatter) + } +} + +impl<'de, T> Deref for Serde<T> +where De<T>: Deserialize<'de>, for<'a> Ser<'a, T>: Serialize +{ + type Target = T; + + fn deref(&self) -> &T { + &self.0 + } +} + +impl<'de, T> DerefMut for Serde<T> +where De<T>: Deserialize<'de>, for<'a> Ser<'a, T>: Serialize +{ + fn deref_mut(&mut self) -> &mut T { + &mut self.0 + } +} + +impl<'de, T: PartialEq> PartialEq<T> for Serde<T> +where De<T>: Deserialize<'de>, for<'a> Ser<'a, T>: Serialize +{ + fn eq(&self, other: &T) -> bool { + self.0 == *other + } +} + +impl<'de, T> Deserialize<'de> for Serde<T> +where De<T>: Deserialize<'de>, for<'a> Ser<'a, T>: Serialize +{ + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where D: Deserializer<'de> + { + De::deserialize(deserializer).map(De::into_inner).map(Serde) + } +} + +impl<'de, T> Serialize for Serde<T> +where De<T>: Deserialize<'de>, for<'a> Ser<'a, T>: Serialize +{ + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where S: Serializer + { + Ser(&self.0).serialize(serializer) + } +} + +#[test] +fn test_ser_de_url() { + let url = Url::parse("http://www.test.com/foo/bar?$param=bazz").unwrap(); + let s = serde_json::to_string(&Ser::new(&url)).unwrap(); + let new_url: Url = serde_json::from_str(&s).map(De::into_inner).unwrap(); + assert_eq!(url, new_url); +} + +#[test] +fn test_derive_deserialize_with_for_url() { + #[derive(Deserialize, Debug, Eq, PartialEq)] + struct Test { + #[serde(deserialize_with = "deserialize", rename = "_url_")] + url: Url + } + + let url_str = "http://www.test.com/foo/bar?$param=bazz"; + + let expected = Test { + url: Url::parse(url_str).unwrap() + }; + let json_string = format!(r#"{{"_url_": "{}"}}"#, url_str); + let got: Test = serde_json::from_str(&json_string).unwrap(); + assert_eq!(expected, got); + +} + +#[test] +fn test_derive_deserialize_with_for_option_url() { + #[derive(Deserialize, Debug, Eq, PartialEq)] + struct Test { + #[serde(deserialize_with = "deserialize", rename = "_url_")] + url: Option<Url> + } + + let url_str = "http://www.test.com/foo/bar?$param=bazz"; + + let expected = Test { + url: Some(Url::parse(url_str).unwrap()) + }; + let json_string = format!(r#"{{"_url_": "{}"}}"#, url_str); + let got: Test = serde_json::from_str(&json_string).unwrap(); + assert_eq!(expected, got); + + let expected = Test { + url: None + }; + let json_string = r#"{"_url_": null}"#; + let got: Test = serde_json::from_str(&json_string).unwrap(); + assert_eq!(expected, got); +} + +#[test] +fn test_derive_serialize_with_for_url() { + #[derive(Serialize, Debug, Eq, PartialEq)] + struct Test { + #[serde(serialize_with = "serialize", rename = "_url_")] + url: Url + } + + let url_str = "http://www.test.com/foo/bar?$param=bazz"; + + let expected = format!(r#"{{"_url_":"{}"}}"#, url_str); + let input = Test {url: Url::parse(url_str).unwrap()}; + let got = serde_json::to_string(&input).unwrap(); + assert_eq!(expected, got); +} + +#[test] +fn test_derive_serialize_with_for_option_url() { + #[derive(Serialize, Debug, Eq, PartialEq)] + struct Test { + #[serde(serialize_with = "serialize", rename = "_url_")] + url: Option<Url> + } + + let url_str = "http://www.test.com/foo/bar?$param=bazz"; + + let expected = format!(r#"{{"_url_":"{}"}}"#, url_str); + let input = Test {url: Some(Url::parse(url_str).unwrap())}; + let got = 
serde_json::to_string(&input).unwrap(); + assert_eq!(expected, got); + + let expected = format!(r#"{{"_url_":null}}"#); + let input = Test {url: None}; + let got = serde_json::to_string(&input).unwrap(); + assert_eq!(expected, got); +} + +#[test] +fn test_derive_with_for_url() { + #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] + struct Test { + #[serde(with = "self", rename = "_url_")] + url: Url + } + + let url_str = "http://www.test.com/foo/bar?$param=bazz"; + let json_string = format!(r#"{{"_url_":"{}"}}"#, url_str); + + // test deserialization + let expected = Test { + url: Url::parse(url_str).unwrap() + }; + let got: Test = serde_json::from_str(&json_string).unwrap(); + assert_eq!(expected, got); + + // test serialization + let input = Test {url: Url::parse(url_str).unwrap()}; + let got = serde_json::to_string(&input).unwrap(); + assert_eq!(json_string, got); +} + +#[test] +fn test_host() { + for host in &[ + Host::Domain("foo.com".to_owned()), + Host::Ipv4("127.0.0.1".parse().unwrap()), + Host::Ipv6("::1".parse().unwrap()), + ] { + let json = serde_json::to_string(&Ser(host)).unwrap(); + let de: De<Host> = serde_json::from_str(&json).unwrap(); + assert_eq!(de.into_inner(), *host) + } +}
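
Editor's note on the vendored `urltestdata.json` above: it is a flat JSON array in which bare strings are section comments and each object pairs an `input` and `base` with either `"failure": true` or the component values (`href`, `origin`, `protocol`, …) the parsed URL must expose. The sketch below (not part of the diff; the `Case` struct, `check` function, and `include_str!` path are hypothetical) shows how such entries can be consumed with the same `url`/`serde_json` crates this benchmark vendors, in the pre-2018 style of the vendored code; it checks only `href` for brevity, whereas a full harness would also compare the remaining fields.

```rust
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate url;

use url::Url;

/// One object of the test-data array; unknown fields are ignored by serde.
#[derive(Deserialize)]
struct Case {
    input: String,
    base: String,
    #[serde(default)]
    failure: bool,        // absent on success cases, so default to false
    href: Option<String>, // expected serialization on success
}

fn check(raw_json: &str) {
    let entries: Vec<serde_json::Value> = serde_json::from_str(raw_json).unwrap();
    for entry in entries {
        // Bare strings such as "# Others" are section comments in the data.
        if entry.is_string() {
            continue;
        }
        let case: Case = serde_json::from_value(entry).unwrap();
        // Resolve `input` against `base`, the way a browser would.
        let base = Url::parse(&case.base).ok();
        match Url::options().base_url(base.as_ref()).parse(&case.input) {
            Err(_) => assert!(case.failure, "unexpected failure for {:?}", case.input),
            Ok(url) => assert_eq!(Some(url.as_str().to_owned()), case.href,
                                  "wrong href for {:?}", case.input),
        }
    }
}

fn main() {
    // Hypothetical path to the vendored test data.
    check(include_str!("urltestdata.json"));
}
```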
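
The `Serde<T>` wrapper in the vendored `url_serde` exists so that whole collections of URLs can cross a serde boundary without per-field attributes, as its doc comment notes but its tests do not demonstrate. A minimal sketch (editor's illustration, assuming the vendored crate as a dependency; the `main` body and example URLs are made up):

```rust
extern crate serde_json;
extern crate url;
extern crate url_serde;

use url::Url;
use url_serde::Serde;

fn main() {
    // Deserialize a JSON array straight into Vec<Serde<Url>>; each element
    // goes through the crate's De<Url> wrapper internally.
    let urls: Vec<Serde<Url>> = serde_json::from_str(
        r#"["http://example.org/a", "http://example.org/b"]"#,
    ).unwrap();

    // Serde<T> derefs to T, so Url methods are available directly.
    assert_eq!(urls[0].as_str(), "http://example.org/a");

    // Serialization round-trips through the Ser<'a, Url> impl.
    let json = serde_json::to_string(&urls).unwrap();
    assert_eq!(json, r#"["http://example.org/a","http://example.org/b"]"#);
}
```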